diff --git a/librarian.yaml b/librarian.yaml index 7c0eb430af62..d6a75f1026e5 100644 --- a/librarian.yaml +++ b/librarian.yaml @@ -93,7 +93,6 @@ libraries: issue_tracker_override: https://issuetracker.google.com/issues?q=componentid:190851%2B%20status:open - name: gapic-generator version: 1.30.14 - skip_generate: true python: library_type: CORE name_pretty_override: Google API Client Generator for Python diff --git a/packages/gapic-generator/.repo-metadata.json b/packages/gapic-generator/.repo-metadata.json index f13375b124da..0bbf8431ca82 100644 --- a/packages/gapic-generator/.repo-metadata.json +++ b/packages/gapic-generator/.repo-metadata.json @@ -1,14 +1,11 @@ { - "name": "gapic-generator", - "name_pretty": "Google API Client Generator for Python", "client_documentation": "https://gapic-generator-python.readthedocs.io/en/stable/", - "product_documentation": "", + "distribution_name": "gapic-generator", "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", - "release_level": "stable", "language": "python", "library_type": "CORE", - "repo": "googleapis/google-cloud-python", - "distribution_name": "gapic-generator", - "default_version": "", - "codeowner_team": "@googleapis/cloud-sdk-python-team" -} + "name": "gapic-generator", + "name_pretty": "Google API Client Generator for Python", + "release_level": "stable", + "repo": "googleapis/google-cloud-python" +} \ No newline at end of file diff --git a/packages/gapic-generator/gapic/cli/generate.py b/packages/gapic-generator/gapic/cli/generate.py index b2fe8eec9e8a..3439ac33de8b 100644 --- a/packages/gapic-generator/gapic/cli/generate.py +++ b/packages/gapic-generator/gapic/cli/generate.py @@ -17,7 +17,6 @@ import typing import click - from google.protobuf.compiler import plugin_pb2 from gapic import generator diff --git a/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py b/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py index 
bf9fc1049317..605b0ce4160a 100644 --- a/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py +++ b/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py @@ -18,8 +18,7 @@ import inflection import libcst -from gapic.configurable_snippetgen import libcst_utils -from gapic.configurable_snippetgen import snippet_config_language_pb2 +from gapic.configurable_snippetgen import libcst_utils, snippet_config_language_pb2 from gapic.schema import api diff --git a/packages/gapic-generator/gapic/configurable_snippetgen/snippet_config_language_pb2.py b/packages/gapic-generator/gapic/configurable_snippetgen/snippet_config_language_pb2.py index 938060d5eaf5..03867a1f2aa6 100644 --- a/packages/gapic-generator/gapic/configurable_snippetgen/snippet_config_language_pb2.py +++ b/packages/gapic-generator/gapic/configurable_snippetgen/snippet_config_language_pb2.py @@ -17,28 +17,31 @@ # source: snippet_config_language.proto # type: ignore """Generated protocol buffer code.""" -from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 -from google.protobuf.internal import builder as _builder + from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - b'\n\x1dsnippet_config_language.proto\x12/google.cloud.tools.snippetgen.configlanguage.v1\x1a google/protobuf/descriptor.proto\"\xcd\x02\n\rSnippetConfig\x12X\n\x08metadata\x18\x01 \x01(\x0b\x32\x46.google.cloud.tools.snippetgen.configlanguage.v1.SnippetConfigMetadata\x12\x41\n\x03rpc\x18\x02 
\x01(\x0b\x32\x34.google.cloud.tools.snippetgen.configlanguage.v1.Rpc\x12T\n\tsignature\x18\x03 \x01(\x0b\x32\x41.google.cloud.tools.snippetgen.configlanguage.v1.SnippetSignature\x12I\n\x07snippet\x18\x04 \x01(\x0b\x32\x38.google.cloud.tools.snippetgen.configlanguage.v1.Snippet\"\xd3\x01\n\x15SnippetConfigMetadata\x12\x0f\n\x07skipped\x18\x01 \x01(\x08\x12\x63\n\x11skipped_languages\x18\x02 \x03(\x0e\x32H.google.cloud.tools.snippetgen.configlanguage.v1.GeneratorOutputLanguage\x12\x11\n\tconfig_id\x18\x03 \x01(\t\x12\x14\n\x0csnippet_name\x18\x04 \x01(\t\x12\x1b\n\x13snippet_description\x18\x05 \x01(\t\"Y\n\x03Rpc\x12\x15\n\rproto_package\x18\x01 \x01(\t\x12\x13\n\x0b\x61pi_version\x18\x02 \x03(\t\x12\x14\n\x0cservice_name\x18\x03 \x01(\t\x12\x10\n\x08rpc_name\x18\x04 \x01(\t\"\x99\x03\n\x10SnippetSignature\x12\x1b\n\x13snippet_method_name\x18\x01 \x01(\t\x12J\n\x0breturn_type\x18\x02 \x01(\x0b\x32\x35.google.cloud.tools.snippetgen.configlanguage.v1.Type\x12i\n\x0fsync_preference\x18\x03 \x01(\x0e\x32P.google.cloud.tools.snippetgen.configlanguage.v1.SnippetSignature.SyncPreference\x12Z\n\nparameters\x18\x04 \x03(\x0b\x32\x46.google.cloud.tools.snippetgen.configlanguage.v1.Statement.Declaration\"U\n\x0eSyncPreference\x12\x16\n\x12LANGUAGE_PREFERRED\x10\x00\x12\x10\n\x0cPREFER_ASYNC\x10\x01\x12\x0f\n\x0bPREFER_SYNC\x10\x02\x12\x08\n\x04\x42OTH\x10\x03\"\x91,\n\x07Snippet\x12t\n\x1dservice_client_initialization\x18\x01 \x01(\x0b\x32M.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientInitialization\x12U\n\x08standard\x18\x02 \x01(\x0b\x32\x41.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.StandardH\x00\x12W\n\tpaginated\x18\x03 \x01(\x0b\x32\x42.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.PaginatedH\x00\x12K\n\x03lro\x18\x04 \x01(\x0b\x32<.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.LroH\x00\x12\x64\n\x10\x63lient_streaming\x18\x05 
\x01(\x0b\x32H.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientStreamingH\x00\x12\x64\n\x10server_streaming\x18\x06 \x01(\x0b\x32H.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ServerStreamingH\x00\x12`\n\x0e\x62idi_streaming\x18\x07 \x01(\x0b\x32\x46.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.BidiStreamingH\x00\x12T\n\x10\x66inal_statements\x18\x08 \x03(\x0b\x32:.google.cloud.tools.snippetgen.configlanguage.v1.Statement\x1a\xff\x03\n\x14\x43lientInitialization\x12]\n\x19pre_client_initialization\x18\x01 \x03(\x0b\x32:.google.cloud.tools.snippetgen.configlanguage.v1.Statement\x12~\n\x17\x63ustom_service_endpoint\x18\x02 \x01(\x0b\x32].google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientInitialization.ServiceEndpoint\x1a\x87\x02\n\x0fServiceEndpoint\x12\x83\x01\n\x06schema\x18\x01 \x01(\x0e\x32s.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientInitialization.ServiceEndpoint.ServiceEndpointSchema\x12\x0c\n\x04host\x18\x02 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x0c\n\x04port\x18\x04 \x01(\x05\"B\n\x15ServiceEndpointSchema\x12\x14\n\x10LANGUAGE_DEFAULT\x10\x00\x12\t\n\x05HTTPS\x10\x01\x12\x08\n\x04HTTP\x10\x02\x1a\xbf\x02\n\x08Standard\x12t\n\x16request_initialization\x18\x01 \x01(\x0b\x32T.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.SimpleRequestInitialization\x12Q\n\x04\x63\x61ll\x18\x02 \x01(\x0b\x32\x43.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientCall\x12j\n\x11response_handling\x18\x03 \x01(\x0b\x32O.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.SimpleResponseHandling\x1a\xce\x02\n\tPaginated\x12t\n\x16request_initialization\x18\x01 \x01(\x0b\x32T.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.SimpleRequestInitialization\x12[\n\x0epaginated_call\x18\x02 \x01(\x0b\x32\x43.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientCall\x12n\n\x12paginated_handling\x18\x03 
\x01(\x0b\x32R.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.PaginatedResponseHandling\x1a\xb2\x02\n\x03Lro\x12t\n\x16request_initialization\x18\x01 \x01(\x0b\x32T.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.SimpleRequestInitialization\x12Q\n\x04\x63\x61ll\x18\x02 \x01(\x0b\x32\x43.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientCall\x12\x62\n\x0clro_handling\x18\x03 \x01(\x0b\x32L.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.LroResponseHandling\x1a\xf4\x02\n\x0f\x43lientStreaming\x12`\n\x13initialization_call\x18\x01 \x01(\x0b\x32\x43.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientCall\x12\x1a\n\x12\x63lient_stream_name\x18\x02 \x01(\t\x12w\n\x16request_initialization\x18\x03 \x01(\x0b\x32W.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.StreamingRequestInitialization\x12j\n\x11response_handling\x18\x04 \x01(\x0b\x32O.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.SimpleResponseHandling\x1a\xf4\x02\n\x0fServerStreaming\x12t\n\x16request_initialization\x18\x01 \x01(\x0b\x32T.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.SimpleRequestInitialization\x12`\n\x13initialization_call\x18\x02 \x01(\x0b\x32\x43.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientCall\x12\x1a\n\x12server_stream_name\x18\x03 \x01(\t\x12m\n\x11response_handling\x18\x04 \x01(\x0b\x32R.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.StreamingResponseHandling\x1a\x91\x03\n\rBidiStreaming\x12`\n\x13initialization_call\x18\x01 \x01(\x0b\x32\x43.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientCall\x12\x1a\n\x12\x63lient_stream_name\x18\x02 \x01(\t\x12w\n\x16request_initialization\x18\x03 \x01(\x0b\x32W.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.StreamingRequestInitialization\x12\x1a\n\x12server_stream_name\x18\x04 \x01(\t\x12m\n\x11response_handling\x18\x05 
\x01(\x0b\x32R.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.StreamingResponseHandling\x1aZ\n\nClientCall\x12L\n\x08pre_call\x18\x02 \x03(\x0b\x32:.google.cloud.tools.snippetgen.configlanguage.v1.Statement\x1a\xe7\x01\n\x1bSimpleRequestInitialization\x12^\n\x1apre_request_initialization\x18\x01 \x03(\x0b\x32:.google.cloud.tools.snippetgen.configlanguage.v1.Statement\x12R\n\rrequest_value\x18\x02 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x12\x14\n\x0crequest_name\x18\x03 \x01(\t\x1a\xe1\x02\n\x1eStreamingRequestInitialization\x12u\n\x17\x66irst_streaming_request\x18\x01 \x01(\x0b\x32T.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.SimpleRequestInitialization\x12W\n\titeration\x18\x03 \x01(\x0b\x32\x44.google.cloud.tools.snippetgen.configlanguage.v1.Statement.Iteration\x12o\n\x11streaming_request\x18\x04 \x01(\x0b\x32T.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.SimpleRequestInitialization\x1a/\n\x16SimpleResponseHandling\x12\x15\n\rresponse_name\x18\x01 \x01(\t\x1a\xec\x07\n\x19PaginatedResponseHandling\x12\x15\n\rresponse_name\x18\x01 \x01(\t\x12l\n\x07\x62y_item\x18\x02 \x01(\x0b\x32Y.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.PaginatedResponseHandling.ByItemH\x00\x12l\n\x07\x62y_page\x18\x03 \x01(\x0b\x32Y.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.PaginatedResponseHandling.ByPageH\x00\x12{\n\x0fnext_page_token\x18\x04 \x01(\x0b\x32`.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.PaginatedResponseHandling.NextPageTokenH\x00\x1at\n\x06\x42yItem\x12\x11\n\titem_name\x18\x01 \x01(\t\x12W\n\x13per_item_statements\x18\x02 \x03(\x0b\x32:.google.cloud.tools.snippetgen.configlanguage.v1.Statement\x1a\xe0\x01\n\x06\x42yPage\x12\x11\n\tpage_name\x18\x01 \x01(\t\x12W\n\x13per_page_statements\x18\x02 \x03(\x0b\x32:.google.cloud.tools.snippetgen.configlanguage.v1.Statement\x12j\n\x07\x62y_item\x18\x03 
\x01(\x0b\x32Y.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.PaginatedResponseHandling.ByItem\x1a\xf2\x01\n\rNextPageToken\x12\x1c\n\x14next_page_token_name\x18\x01 \x01(\t\x12W\n\x12\x65xplicit_page_size\x18\x02 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x12j\n\x07\x62y_page\x18\x03 \x01(\x0b\x32Y.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.PaginatedResponseHandling.ByPageB\x11\n\x0fpagination_kind\x1a\xcf\x02\n\x13LroResponseHandling\x12\x15\n\rresponse_name\x18\x01 \x01(\t\x12n\n\x0cpolling_type\x18\x02 \x01(\x0e\x32X.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.LroResponseHandling.PollingType\x12\x1d\n\x15polling_response_name\x18\x03 \x01(\t\x12Y\n\x0cpolling_call\x18\x04 \x01(\x0b\x32\x43.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientCall\"7\n\x0bPollingType\x12\x14\n\x10UNTIL_COMPLETION\x10\x00\x12\x08\n\x04ONCE\x10\x01\x12\x08\n\x04NONE\x10\x02\x1a\x9e\x01\n\x19StreamingResponseHandling\x12\x1d\n\x15\x63urrent_response_name\x18\x01 \x01(\t\x12\x62\n\x1eper_stream_response_statements\x18\x02 \x03(\x0b\x32:.google.cloud.tools.snippetgen.configlanguage.v1.StatementB\x06\n\x04\x63\x61ll\"\xc5\x18\n\tStatement\x12]\n\x0b\x64\x65\x63laration\x18\x01 \x01(\x0b\x32\x46.google.cloud.tools.snippetgen.configlanguage.v1.Statement.DeclarationH\x00\x12\x64\n\x0fstandard_output\x18\x02 \x01(\x0b\x32I.google.cloud.tools.snippetgen.configlanguage.v1.Statement.StandardOutputH\x00\x12S\n\x06return\x18\x03 \x01(\x0b\x32\x41.google.cloud.tools.snippetgen.configlanguage.v1.Statement.ReturnH\x00\x12]\n\x0b\x63onditional\x18\x04 \x01(\x0b\x32\x46.google.cloud.tools.snippetgen.configlanguage.v1.Statement.ConditionalH\x00\x12Y\n\titeration\x18\x05 \x01(\x0b\x32\x44.google.cloud.tools.snippetgen.configlanguage.v1.Statement.IterationH\x00\x1a\xc1\x01\n\x0b\x44\x65\x63laration\x12\x43\n\x04type\x18\x01 \x01(\x0b\x32\x35.google.cloud.tools.snippetgen.configlanguage.v1.Type\x12\x0c\n\x04name\x18\x02 
\x01(\t\x12J\n\x05value\x18\x03 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x1a\\\n\x0eStandardOutput\x12J\n\x05value\x18\x02 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x1aU\n\x06Return\x12K\n\x06result\x18\x01 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x1a\xf8\x01\n\x0b\x43onditional\x12N\n\tcondition\x18\x01 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x12K\n\x07on_true\x18\x02 \x03(\x0b\x32:.google.cloud.tools.snippetgen.configlanguage.v1.Statement\x12L\n\x08on_false\x18\x03 \x03(\x0b\x32:.google.cloud.tools.snippetgen.configlanguage.v1.Statement\x1a\xdd\x0f\n\tIteration\x12\x83\x01\n\x1anumeric_sequence_iteration\x18\x01 \x01(\x0b\x32].google.cloud.tools.snippetgen.configlanguage.v1.Statement.Iteration.NumericSequenceIterationH\x00\x12t\n\x12repeated_iteration\x18\x02 \x01(\x0b\x32V.google.cloud.tools.snippetgen.configlanguage.v1.Statement.Iteration.RepeatedIterationH\x00\x12j\n\rmap_iteration\x18\x03 \x01(\x0b\x32Q.google.cloud.tools.snippetgen.configlanguage.v1.Statement.Iteration.MapIterationH\x00\x12n\n\x0f\x62ytes_iteration\x18\x04 \x01(\x0b\x32S.google.cloud.tools.snippetgen.configlanguage.v1.Statement.Iteration.BytesIterationH\x00\x12N\n\nstatements\x18\x05 \x03(\x0b\x32:.google.cloud.tools.snippetgen.configlanguage.v1.Statement\x1a\xdc\x05\n\x18NumericSequenceIteration\x12X\n\x08start_at\x18\x01 \x01(\x0b\x32\x46.google.cloud.tools.snippetgen.configlanguage.v1.Statement.Declaration\x12P\n\tincrement\x18\x03 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionH\x00\x12Q\n\nmultiplier\x18\x04 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionH\x00\x12Y\n\x12less_than_or_equal\x18\x07 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionH\x01\x12P\n\tless_than\x18\x08 
\x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionH\x01\x12\\\n\x15greater_than_or_equal\x18\t \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionH\x01\x12S\n\x0cgreater_than\x18\n \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionH\x01\x12R\n\x0btotal_steps\x18\x0b \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionH\x01\x42\x06\n\x04stepB\x05\n\x03\x65nd\x1a\x8c\x01\n\x11RepeatedIteration\x12\x61\n\x11repeated_elements\x18\x01 \x01(\x0b\x32\x46.google.cloud.tools.snippetgen.configlanguage.v1.Statement.Declaration\x12\x14\n\x0c\x63urrent_name\x18\x02 \x01(\t\x1a\x99\x01\n\x0cMapIteration\x12S\n\x03map\x18\x01 \x01(\x0b\x32\x46.google.cloud.tools.snippetgen.configlanguage.v1.Statement.Declaration\x12\x18\n\x10\x63urrent_key_name\x18\x02 \x01(\t\x12\x1a\n\x12\x63urrent_value_name\x18\x03 \x01(\t\x1a\x8b\x03\n\x0e\x42ytesIteration\x12]\n\rbyte_sequence\x18\x01 \x01(\x0b\x32\x46.google.cloud.tools.snippetgen.configlanguage.v1.Statement.Declaration\x12Q\n\nchunk_size\x18\x02 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionH\x00\x12S\n\x0ctotal_chunks\x18\x03 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionH\x00\x12S\n\nchunk_type\x18\x04 \x01(\x0b\x32?.google.cloud.tools.snippetgen.configlanguage.v1.Type.BytesType\x12\x14\n\x0c\x63urrent_name\x18\x05 \x01(\tB\x07\n\x05\x63hunkB\x10\n\x0eiteration_typeB\x10\n\x0estatement_type\"\xb6\r\n\x04Type\x12W\n\x0bscalar_type\x18\x01 \x01(\x0e\x32@.google.cloud.tools.snippetgen.configlanguage.v1.Type.ScalarTypeH\x00\x12S\n\tenum_type\x18\x02 \x01(\x0b\x32>.google.cloud.tools.snippetgen.configlanguage.v1.Type.EnumTypeH\x00\x12U\n\nbytes_type\x18\x03 \x01(\x0b\x32?.google.cloud.tools.snippetgen.configlanguage.v1.Type.BytesTypeH\x00\x12Y\n\x0cmessage_type\x18\x04 \x01(\x0b\x32\x41.google.cloud.tools.snippetgen.configlanguage.v1.Type.MessageTypeH\x00\x12[\n\rrepeated_type\x18\x05 
\x01(\x0b\x32\x42.google.cloud.tools.snippetgen.configlanguage.v1.Type.RepeatedTypeH\x00\x12Q\n\x08map_type\x18\x06 \x01(\x0b\x32=.google.cloud.tools.snippetgen.configlanguage.v1.Type.MapTypeH\x00\x1a\"\n\x08\x45numType\x12\x16\n\x0e\x65num_full_name\x18\x01 \x01(\t\x1a\xce\x01\n\tBytesType\x12o\n\x13language_equivalent\x18\x01 \x01(\x0e\x32R.google.cloud.tools.snippetgen.configlanguage.v1.Type.BytesType.LanguageEquivalent\"P\n\x12LanguageEquivalent\x12\x12\n\x0ePROTOBUF_BYTES\x10\x00\x12\n\n\x06\x42\x41SE64\x10\x01\x12\x0e\n\nBYTE_ARRAY\x10\x02\x12\n\n\x06STREAM\x10\x03\x1a(\n\x0bMessageType\x12\x19\n\x11message_full_name\x18\x01 \x01(\t\x1a\x91\x02\n\x0cRepeatedType\x12K\n\x0c\x65lement_type\x18\x01 \x01(\x0b\x32\x35.google.cloud.tools.snippetgen.configlanguage.v1.Type\x12r\n\x13language_equivalent\x18\x02 \x01(\x0e\x32U.google.cloud.tools.snippetgen.configlanguage.v1.Type.RepeatedType.LanguageEquivalent\"@\n\x12LanguageEquivalent\x12\x15\n\x11PROTOBUF_REPEATED\x10\x00\x12\t\n\x05\x41RRAY\x10\x01\x12\x08\n\x04LIST\x10\x02\x1a\xc4\x02\n\x07MapType\x12G\n\x08key_type\x18\x01 \x01(\x0b\x32\x35.google.cloud.tools.snippetgen.configlanguage.v1.Type\x12I\n\nvalue_type\x18\x02 \x01(\x0b\x32\x35.google.cloud.tools.snippetgen.configlanguage.v1.Type\x12m\n\x13language_equivalent\x18\x03 
\x01(\x0e\x32P.google.cloud.tools.snippetgen.configlanguage.v1.Type.MapType.LanguageEquivalent\"6\n\x12LanguageEquivalent\x12\x10\n\x0cPROTOBUF_MAP\x10\x00\x12\x0e\n\nDICTIONARY\x10\x01\"\x96\x02\n\nScalarType\x12\x19\n\x15SCALAR_TYPE_UNDEFINED\x10\x00\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\x42\x0b\n\ttype_kind\"\xa8\x10\n\nExpression\x12[\n\nnull_value\x18\x01 \x01(\x0e\x32\x45.google.cloud.tools.snippetgen.configlanguage.v1.Expression.NullValueH\x00\x12\x61\n\rdefault_value\x18\x02 \x01(\x0e\x32H.google.cloud.tools.snippetgen.configlanguage.v1.Expression.DefaultValueH\x00\x12[\n\nname_value\x18\x03 \x01(\x0b\x32\x45.google.cloud.tools.snippetgen.configlanguage.v1.Expression.NameValueH\x00\x12\x16\n\x0cnumber_value\x18\x04 \x01(\x01H\x00\x12\x17\n\rboolean_value\x18\x05 \x01(\x08H\x00\x12\x16\n\x0cstring_value\x18\x06 \x01(\tH\x00\x12\x14\n\nenum_value\x18\x07 \x01(\tH\x00\x12]\n\x0b\x62ytes_value\x18\x08 \x01(\x0b\x32\x46.google.cloud.tools.snippetgen.configlanguage.v1.Expression.BytesValueH\x00\x12\x61\n\rcomplex_value\x18\t \x01(\x0b\x32H.google.cloud.tools.snippetgen.configlanguage.v1.Expression.ComplexValueH\x00\x12_\n\nlist_value\x18\n \x01(\x0b\x32I.google.cloud.tools.snippetgen.configlanguage.v1.Expression.RepeatedValueH\x00\x12Y\n\tmap_value\x18\x0b \x01(\x0b\x32\x44.google.cloud.tools.snippetgen.configlanguage.v1.Expression.MapValueH\x00\x12l\n\x11\x63onditional_value\x18\x0c \x01(\x0b\x32O.google.cloud.tools.snippetgen.configlanguage.v1.Expression.ConditionalOperatorH\x00\x12\x13\n\x0b\x64\x65scription\x18\r 
\x01(\t\x1a\'\n\tNameValue\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x03(\t\x1a\xb3\x02\n\nBytesValue\x12T\n\rbase64_string\x18\x01 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionH\x00\x12h\n\x0b\x66ile_stream\x18\x02 \x01(\x0b\x32Q.google.cloud.tools.snippetgen.configlanguage.v1.Expression.BytesValue.FileStreamH\x00\x1a\\\n\nFileStream\x12N\n\tfile_path\x18\x01 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionB\x07\n\x05value\x1a\xec\x01\n\x0c\x43omplexValue\x12l\n\nproperties\x18\x01 \x03(\x0b\x32X.google.cloud.tools.snippetgen.configlanguage.v1.Expression.ComplexValue.PropertiesEntry\x1an\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12J\n\x05value\x18\x02 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression:\x02\x38\x01\x1a\\\n\rRepeatedValue\x12K\n\x06values\x18\x01 \x03(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x1a\xa2\x01\n\x08MapValue\x12I\n\x04keys\x18\x01 \x03(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x12K\n\x06values\x18\x02 \x03(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x1a\x82\x02\n\x13\x43onditionalOperator\x12N\n\tcondition\x18\x01 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x12L\n\x07on_true\x18\x02 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x12M\n\x08on_false\x18\x03 
\x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\"\x1b\n\tNullValue\x12\x0e\n\nNULL_VALUE\x10\x00\"!\n\x0c\x44\x65\x66\x61ultValue\x12\x11\n\rDEFAULT_VALUE\x10\x00\x42\x07\n\x05value*\xa3\x01\n\x17GeneratorOutputLanguage\x12)\n%GENERATOR_OUTPUT_LANGUAGE_UNSPECIFIED\x10\x00\x12\x0f\n\x0b\x43_PLUS_PLUS\x10\x01\x12\x0b\n\x07\x43_SHARP\x10\x02\x12\x06\n\x02GO\x10\x03\x12\x08\n\x04JAVA\x10\x04\x12\x0e\n\nJAVASCRIPT\x10\x05\x12\x07\n\x03PHP\x10\x06\x12\n\n\x06PYTHON\x10\x07\x12\x08\n\x04RUBY\x10\x08\x42\xee\x01\n3com.google.cloud.tools.snippetgen.configlanguage.v1B\x1aSnippetConfigLanguageProtoP\x01\xaa\x02/Google.Cloud.Tools.SnippetGen.ConfigLanguage.V1\xca\x02/Google\\Cloud\\Tools\\SnippetGen\\ConfigLanguage\\V1\xea\x02\x34Google::Cloud::Tools::SnippetGen::ConfigLanguage::V1b\x06proto3') + b'\n\x1dsnippet_config_language.proto\x12/google.cloud.tools.snippetgen.configlanguage.v1\x1a google/protobuf/descriptor.proto"\xcd\x02\n\rSnippetConfig\x12X\n\x08metadata\x18\x01 \x01(\x0b\x32\x46.google.cloud.tools.snippetgen.configlanguage.v1.SnippetConfigMetadata\x12\x41\n\x03rpc\x18\x02 \x01(\x0b\x32\x34.google.cloud.tools.snippetgen.configlanguage.v1.Rpc\x12T\n\tsignature\x18\x03 \x01(\x0b\x32\x41.google.cloud.tools.snippetgen.configlanguage.v1.SnippetSignature\x12I\n\x07snippet\x18\x04 \x01(\x0b\x32\x38.google.cloud.tools.snippetgen.configlanguage.v1.Snippet"\xd3\x01\n\x15SnippetConfigMetadata\x12\x0f\n\x07skipped\x18\x01 \x01(\x08\x12\x63\n\x11skipped_languages\x18\x02 \x03(\x0e\x32H.google.cloud.tools.snippetgen.configlanguage.v1.GeneratorOutputLanguage\x12\x11\n\tconfig_id\x18\x03 \x01(\t\x12\x14\n\x0csnippet_name\x18\x04 \x01(\t\x12\x1b\n\x13snippet_description\x18\x05 \x01(\t"Y\n\x03Rpc\x12\x15\n\rproto_package\x18\x01 \x01(\t\x12\x13\n\x0b\x61pi_version\x18\x02 \x03(\t\x12\x14\n\x0cservice_name\x18\x03 \x01(\t\x12\x10\n\x08rpc_name\x18\x04 \x01(\t"\x99\x03\n\x10SnippetSignature\x12\x1b\n\x13snippet_method_name\x18\x01 
\x01(\t\x12J\n\x0breturn_type\x18\x02 \x01(\x0b\x32\x35.google.cloud.tools.snippetgen.configlanguage.v1.Type\x12i\n\x0fsync_preference\x18\x03 \x01(\x0e\x32P.google.cloud.tools.snippetgen.configlanguage.v1.SnippetSignature.SyncPreference\x12Z\n\nparameters\x18\x04 \x03(\x0b\x32\x46.google.cloud.tools.snippetgen.configlanguage.v1.Statement.Declaration"U\n\x0eSyncPreference\x12\x16\n\x12LANGUAGE_PREFERRED\x10\x00\x12\x10\n\x0cPREFER_ASYNC\x10\x01\x12\x0f\n\x0bPREFER_SYNC\x10\x02\x12\x08\n\x04\x42OTH\x10\x03"\x91,\n\x07Snippet\x12t\n\x1dservice_client_initialization\x18\x01 \x01(\x0b\x32M.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientInitialization\x12U\n\x08standard\x18\x02 \x01(\x0b\x32\x41.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.StandardH\x00\x12W\n\tpaginated\x18\x03 \x01(\x0b\x32\x42.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.PaginatedH\x00\x12K\n\x03lro\x18\x04 \x01(\x0b\x32<.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.LroH\x00\x12\x64\n\x10\x63lient_streaming\x18\x05 \x01(\x0b\x32H.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientStreamingH\x00\x12\x64\n\x10server_streaming\x18\x06 \x01(\x0b\x32H.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ServerStreamingH\x00\x12`\n\x0e\x62idi_streaming\x18\x07 \x01(\x0b\x32\x46.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.BidiStreamingH\x00\x12T\n\x10\x66inal_statements\x18\x08 \x03(\x0b\x32:.google.cloud.tools.snippetgen.configlanguage.v1.Statement\x1a\xff\x03\n\x14\x43lientInitialization\x12]\n\x19pre_client_initialization\x18\x01 \x03(\x0b\x32:.google.cloud.tools.snippetgen.configlanguage.v1.Statement\x12~\n\x17\x63ustom_service_endpoint\x18\x02 \x01(\x0b\x32].google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientInitialization.ServiceEndpoint\x1a\x87\x02\n\x0fServiceEndpoint\x12\x83\x01\n\x06schema\x18\x01 
\x01(\x0e\x32s.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientInitialization.ServiceEndpoint.ServiceEndpointSchema\x12\x0c\n\x04host\x18\x02 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x0c\n\x04port\x18\x04 \x01(\x05"B\n\x15ServiceEndpointSchema\x12\x14\n\x10LANGUAGE_DEFAULT\x10\x00\x12\t\n\x05HTTPS\x10\x01\x12\x08\n\x04HTTP\x10\x02\x1a\xbf\x02\n\x08Standard\x12t\n\x16request_initialization\x18\x01 \x01(\x0b\x32T.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.SimpleRequestInitialization\x12Q\n\x04\x63\x61ll\x18\x02 \x01(\x0b\x32\x43.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientCall\x12j\n\x11response_handling\x18\x03 \x01(\x0b\x32O.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.SimpleResponseHandling\x1a\xce\x02\n\tPaginated\x12t\n\x16request_initialization\x18\x01 \x01(\x0b\x32T.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.SimpleRequestInitialization\x12[\n\x0epaginated_call\x18\x02 \x01(\x0b\x32\x43.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientCall\x12n\n\x12paginated_handling\x18\x03 \x01(\x0b\x32R.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.PaginatedResponseHandling\x1a\xb2\x02\n\x03Lro\x12t\n\x16request_initialization\x18\x01 \x01(\x0b\x32T.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.SimpleRequestInitialization\x12Q\n\x04\x63\x61ll\x18\x02 \x01(\x0b\x32\x43.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientCall\x12\x62\n\x0clro_handling\x18\x03 \x01(\x0b\x32L.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.LroResponseHandling\x1a\xf4\x02\n\x0f\x43lientStreaming\x12`\n\x13initialization_call\x18\x01 \x01(\x0b\x32\x43.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientCall\x12\x1a\n\x12\x63lient_stream_name\x18\x02 \x01(\t\x12w\n\x16request_initialization\x18\x03 \x01(\x0b\x32W.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.StreamingRequestInitialization\x12j\n\x11response_handling\x18\x04 
\x01(\x0b\x32O.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.SimpleResponseHandling\x1a\xf4\x02\n\x0fServerStreaming\x12t\n\x16request_initialization\x18\x01 \x01(\x0b\x32T.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.SimpleRequestInitialization\x12`\n\x13initialization_call\x18\x02 \x01(\x0b\x32\x43.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientCall\x12\x1a\n\x12server_stream_name\x18\x03 \x01(\t\x12m\n\x11response_handling\x18\x04 \x01(\x0b\x32R.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.StreamingResponseHandling\x1a\x91\x03\n\rBidiStreaming\x12`\n\x13initialization_call\x18\x01 \x01(\x0b\x32\x43.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientCall\x12\x1a\n\x12\x63lient_stream_name\x18\x02 \x01(\t\x12w\n\x16request_initialization\x18\x03 \x01(\x0b\x32W.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.StreamingRequestInitialization\x12\x1a\n\x12server_stream_name\x18\x04 \x01(\t\x12m\n\x11response_handling\x18\x05 \x01(\x0b\x32R.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.StreamingResponseHandling\x1aZ\n\nClientCall\x12L\n\x08pre_call\x18\x02 \x03(\x0b\x32:.google.cloud.tools.snippetgen.configlanguage.v1.Statement\x1a\xe7\x01\n\x1bSimpleRequestInitialization\x12^\n\x1apre_request_initialization\x18\x01 \x03(\x0b\x32:.google.cloud.tools.snippetgen.configlanguage.v1.Statement\x12R\n\rrequest_value\x18\x02 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x12\x14\n\x0crequest_name\x18\x03 \x01(\t\x1a\xe1\x02\n\x1eStreamingRequestInitialization\x12u\n\x17\x66irst_streaming_request\x18\x01 \x01(\x0b\x32T.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.SimpleRequestInitialization\x12W\n\titeration\x18\x03 \x01(\x0b\x32\x44.google.cloud.tools.snippetgen.configlanguage.v1.Statement.Iteration\x12o\n\x11streaming_request\x18\x04 
\x01(\x0b\x32T.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.SimpleRequestInitialization\x1a/\n\x16SimpleResponseHandling\x12\x15\n\rresponse_name\x18\x01 \x01(\t\x1a\xec\x07\n\x19PaginatedResponseHandling\x12\x15\n\rresponse_name\x18\x01 \x01(\t\x12l\n\x07\x62y_item\x18\x02 \x01(\x0b\x32Y.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.PaginatedResponseHandling.ByItemH\x00\x12l\n\x07\x62y_page\x18\x03 \x01(\x0b\x32Y.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.PaginatedResponseHandling.ByPageH\x00\x12{\n\x0fnext_page_token\x18\x04 \x01(\x0b\x32`.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.PaginatedResponseHandling.NextPageTokenH\x00\x1at\n\x06\x42yItem\x12\x11\n\titem_name\x18\x01 \x01(\t\x12W\n\x13per_item_statements\x18\x02 \x03(\x0b\x32:.google.cloud.tools.snippetgen.configlanguage.v1.Statement\x1a\xe0\x01\n\x06\x42yPage\x12\x11\n\tpage_name\x18\x01 \x01(\t\x12W\n\x13per_page_statements\x18\x02 \x03(\x0b\x32:.google.cloud.tools.snippetgen.configlanguage.v1.Statement\x12j\n\x07\x62y_item\x18\x03 \x01(\x0b\x32Y.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.PaginatedResponseHandling.ByItem\x1a\xf2\x01\n\rNextPageToken\x12\x1c\n\x14next_page_token_name\x18\x01 \x01(\t\x12W\n\x12\x65xplicit_page_size\x18\x02 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x12j\n\x07\x62y_page\x18\x03 \x01(\x0b\x32Y.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.PaginatedResponseHandling.ByPageB\x11\n\x0fpagination_kind\x1a\xcf\x02\n\x13LroResponseHandling\x12\x15\n\rresponse_name\x18\x01 \x01(\t\x12n\n\x0cpolling_type\x18\x02 \x01(\x0e\x32X.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.LroResponseHandling.PollingType\x12\x1d\n\x15polling_response_name\x18\x03 \x01(\t\x12Y\n\x0cpolling_call\x18\x04 
\x01(\x0b\x32\x43.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientCall"7\n\x0bPollingType\x12\x14\n\x10UNTIL_COMPLETION\x10\x00\x12\x08\n\x04ONCE\x10\x01\x12\x08\n\x04NONE\x10\x02\x1a\x9e\x01\n\x19StreamingResponseHandling\x12\x1d\n\x15\x63urrent_response_name\x18\x01 \x01(\t\x12\x62\n\x1eper_stream_response_statements\x18\x02 \x03(\x0b\x32:.google.cloud.tools.snippetgen.configlanguage.v1.StatementB\x06\n\x04\x63\x61ll"\xc5\x18\n\tStatement\x12]\n\x0b\x64\x65\x63laration\x18\x01 \x01(\x0b\x32\x46.google.cloud.tools.snippetgen.configlanguage.v1.Statement.DeclarationH\x00\x12\x64\n\x0fstandard_output\x18\x02 \x01(\x0b\x32I.google.cloud.tools.snippetgen.configlanguage.v1.Statement.StandardOutputH\x00\x12S\n\x06return\x18\x03 \x01(\x0b\x32\x41.google.cloud.tools.snippetgen.configlanguage.v1.Statement.ReturnH\x00\x12]\n\x0b\x63onditional\x18\x04 \x01(\x0b\x32\x46.google.cloud.tools.snippetgen.configlanguage.v1.Statement.ConditionalH\x00\x12Y\n\titeration\x18\x05 \x01(\x0b\x32\x44.google.cloud.tools.snippetgen.configlanguage.v1.Statement.IterationH\x00\x1a\xc1\x01\n\x0b\x44\x65\x63laration\x12\x43\n\x04type\x18\x01 \x01(\x0b\x32\x35.google.cloud.tools.snippetgen.configlanguage.v1.Type\x12\x0c\n\x04name\x18\x02 \x01(\t\x12J\n\x05value\x18\x03 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x1a\\\n\x0eStandardOutput\x12J\n\x05value\x18\x02 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x1aU\n\x06Return\x12K\n\x06result\x18\x01 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x1a\xf8\x01\n\x0b\x43onditional\x12N\n\tcondition\x18\x01 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x12K\n\x07on_true\x18\x02 \x03(\x0b\x32:.google.cloud.tools.snippetgen.configlanguage.v1.Statement\x12L\n\x08on_false\x18\x03 
\x03(\x0b\x32:.google.cloud.tools.snippetgen.configlanguage.v1.Statement\x1a\xdd\x0f\n\tIteration\x12\x83\x01\n\x1anumeric_sequence_iteration\x18\x01 \x01(\x0b\x32].google.cloud.tools.snippetgen.configlanguage.v1.Statement.Iteration.NumericSequenceIterationH\x00\x12t\n\x12repeated_iteration\x18\x02 \x01(\x0b\x32V.google.cloud.tools.snippetgen.configlanguage.v1.Statement.Iteration.RepeatedIterationH\x00\x12j\n\rmap_iteration\x18\x03 \x01(\x0b\x32Q.google.cloud.tools.snippetgen.configlanguage.v1.Statement.Iteration.MapIterationH\x00\x12n\n\x0f\x62ytes_iteration\x18\x04 \x01(\x0b\x32S.google.cloud.tools.snippetgen.configlanguage.v1.Statement.Iteration.BytesIterationH\x00\x12N\n\nstatements\x18\x05 \x03(\x0b\x32:.google.cloud.tools.snippetgen.configlanguage.v1.Statement\x1a\xdc\x05\n\x18NumericSequenceIteration\x12X\n\x08start_at\x18\x01 \x01(\x0b\x32\x46.google.cloud.tools.snippetgen.configlanguage.v1.Statement.Declaration\x12P\n\tincrement\x18\x03 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionH\x00\x12Q\n\nmultiplier\x18\x04 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionH\x00\x12Y\n\x12less_than_or_equal\x18\x07 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionH\x01\x12P\n\tless_than\x18\x08 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionH\x01\x12\\\n\x15greater_than_or_equal\x18\t \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionH\x01\x12S\n\x0cgreater_than\x18\n \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionH\x01\x12R\n\x0btotal_steps\x18\x0b \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionH\x01\x42\x06\n\x04stepB\x05\n\x03\x65nd\x1a\x8c\x01\n\x11RepeatedIteration\x12\x61\n\x11repeated_elements\x18\x01 \x01(\x0b\x32\x46.google.cloud.tools.snippetgen.configlanguage.v1.Statement.Declaration\x12\x14\n\x0c\x63urrent_name\x18\x02 \x01(\t\x1a\x99\x01\n\x0cMapIteration\x12S\n\x03map\x18\x01 
\x01(\x0b\x32\x46.google.cloud.tools.snippetgen.configlanguage.v1.Statement.Declaration\x12\x18\n\x10\x63urrent_key_name\x18\x02 \x01(\t\x12\x1a\n\x12\x63urrent_value_name\x18\x03 \x01(\t\x1a\x8b\x03\n\x0e\x42ytesIteration\x12]\n\rbyte_sequence\x18\x01 \x01(\x0b\x32\x46.google.cloud.tools.snippetgen.configlanguage.v1.Statement.Declaration\x12Q\n\nchunk_size\x18\x02 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionH\x00\x12S\n\x0ctotal_chunks\x18\x03 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionH\x00\x12S\n\nchunk_type\x18\x04 \x01(\x0b\x32?.google.cloud.tools.snippetgen.configlanguage.v1.Type.BytesType\x12\x14\n\x0c\x63urrent_name\x18\x05 \x01(\tB\x07\n\x05\x63hunkB\x10\n\x0eiteration_typeB\x10\n\x0estatement_type"\xb6\r\n\x04Type\x12W\n\x0bscalar_type\x18\x01 \x01(\x0e\x32@.google.cloud.tools.snippetgen.configlanguage.v1.Type.ScalarTypeH\x00\x12S\n\tenum_type\x18\x02 \x01(\x0b\x32>.google.cloud.tools.snippetgen.configlanguage.v1.Type.EnumTypeH\x00\x12U\n\nbytes_type\x18\x03 \x01(\x0b\x32?.google.cloud.tools.snippetgen.configlanguage.v1.Type.BytesTypeH\x00\x12Y\n\x0cmessage_type\x18\x04 \x01(\x0b\x32\x41.google.cloud.tools.snippetgen.configlanguage.v1.Type.MessageTypeH\x00\x12[\n\rrepeated_type\x18\x05 \x01(\x0b\x32\x42.google.cloud.tools.snippetgen.configlanguage.v1.Type.RepeatedTypeH\x00\x12Q\n\x08map_type\x18\x06 \x01(\x0b\x32=.google.cloud.tools.snippetgen.configlanguage.v1.Type.MapTypeH\x00\x1a"\n\x08\x45numType\x12\x16\n\x0e\x65num_full_name\x18\x01 \x01(\t\x1a\xce\x01\n\tBytesType\x12o\n\x13language_equivalent\x18\x01 \x01(\x0e\x32R.google.cloud.tools.snippetgen.configlanguage.v1.Type.BytesType.LanguageEquivalent"P\n\x12LanguageEquivalent\x12\x12\n\x0ePROTOBUF_BYTES\x10\x00\x12\n\n\x06\x42\x41SE64\x10\x01\x12\x0e\n\nBYTE_ARRAY\x10\x02\x12\n\n\x06STREAM\x10\x03\x1a(\n\x0bMessageType\x12\x19\n\x11message_full_name\x18\x01 \x01(\t\x1a\x91\x02\n\x0cRepeatedType\x12K\n\x0c\x65lement_type\x18\x01 
\x01(\x0b\x32\x35.google.cloud.tools.snippetgen.configlanguage.v1.Type\x12r\n\x13language_equivalent\x18\x02 \x01(\x0e\x32U.google.cloud.tools.snippetgen.configlanguage.v1.Type.RepeatedType.LanguageEquivalent"@\n\x12LanguageEquivalent\x12\x15\n\x11PROTOBUF_REPEATED\x10\x00\x12\t\n\x05\x41RRAY\x10\x01\x12\x08\n\x04LIST\x10\x02\x1a\xc4\x02\n\x07MapType\x12G\n\x08key_type\x18\x01 \x01(\x0b\x32\x35.google.cloud.tools.snippetgen.configlanguage.v1.Type\x12I\n\nvalue_type\x18\x02 \x01(\x0b\x32\x35.google.cloud.tools.snippetgen.configlanguage.v1.Type\x12m\n\x13language_equivalent\x18\x03 \x01(\x0e\x32P.google.cloud.tools.snippetgen.configlanguage.v1.Type.MapType.LanguageEquivalent"6\n\x12LanguageEquivalent\x12\x10\n\x0cPROTOBUF_MAP\x10\x00\x12\x0e\n\nDICTIONARY\x10\x01"\x96\x02\n\nScalarType\x12\x19\n\x15SCALAR_TYPE_UNDEFINED\x10\x00\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\x42\x0b\n\ttype_kind"\xa8\x10\n\nExpression\x12[\n\nnull_value\x18\x01 \x01(\x0e\x32\x45.google.cloud.tools.snippetgen.configlanguage.v1.Expression.NullValueH\x00\x12\x61\n\rdefault_value\x18\x02 \x01(\x0e\x32H.google.cloud.tools.snippetgen.configlanguage.v1.Expression.DefaultValueH\x00\x12[\n\nname_value\x18\x03 \x01(\x0b\x32\x45.google.cloud.tools.snippetgen.configlanguage.v1.Expression.NameValueH\x00\x12\x16\n\x0cnumber_value\x18\x04 \x01(\x01H\x00\x12\x17\n\rboolean_value\x18\x05 \x01(\x08H\x00\x12\x16\n\x0cstring_value\x18\x06 \x01(\tH\x00\x12\x14\n\nenum_value\x18\x07 \x01(\tH\x00\x12]\n\x0b\x62ytes_value\x18\x08 
\x01(\x0b\x32\x46.google.cloud.tools.snippetgen.configlanguage.v1.Expression.BytesValueH\x00\x12\x61\n\rcomplex_value\x18\t \x01(\x0b\x32H.google.cloud.tools.snippetgen.configlanguage.v1.Expression.ComplexValueH\x00\x12_\n\nlist_value\x18\n \x01(\x0b\x32I.google.cloud.tools.snippetgen.configlanguage.v1.Expression.RepeatedValueH\x00\x12Y\n\tmap_value\x18\x0b \x01(\x0b\x32\x44.google.cloud.tools.snippetgen.configlanguage.v1.Expression.MapValueH\x00\x12l\n\x11\x63onditional_value\x18\x0c \x01(\x0b\x32O.google.cloud.tools.snippetgen.configlanguage.v1.Expression.ConditionalOperatorH\x00\x12\x13\n\x0b\x64\x65scription\x18\r \x01(\t\x1a\'\n\tNameValue\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x03(\t\x1a\xb3\x02\n\nBytesValue\x12T\n\rbase64_string\x18\x01 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionH\x00\x12h\n\x0b\x66ile_stream\x18\x02 \x01(\x0b\x32Q.google.cloud.tools.snippetgen.configlanguage.v1.Expression.BytesValue.FileStreamH\x00\x1a\\\n\nFileStream\x12N\n\tfile_path\x18\x01 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionB\x07\n\x05value\x1a\xec\x01\n\x0c\x43omplexValue\x12l\n\nproperties\x18\x01 \x03(\x0b\x32X.google.cloud.tools.snippetgen.configlanguage.v1.Expression.ComplexValue.PropertiesEntry\x1an\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12J\n\x05value\x18\x02 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression:\x02\x38\x01\x1a\\\n\rRepeatedValue\x12K\n\x06values\x18\x01 \x03(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x1a\xa2\x01\n\x08MapValue\x12I\n\x04keys\x18\x01 \x03(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x12K\n\x06values\x18\x02 \x03(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x1a\x82\x02\n\x13\x43onditionalOperator\x12N\n\tcondition\x18\x01 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x12L\n\x07on_true\x18\x02 
\x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x12M\n\x08on_false\x18\x03 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression"\x1b\n\tNullValue\x12\x0e\n\nNULL_VALUE\x10\x00"!\n\x0c\x44\x65\x66\x61ultValue\x12\x11\n\rDEFAULT_VALUE\x10\x00\x42\x07\n\x05value*\xa3\x01\n\x17GeneratorOutputLanguage\x12)\n%GENERATOR_OUTPUT_LANGUAGE_UNSPECIFIED\x10\x00\x12\x0f\n\x0b\x43_PLUS_PLUS\x10\x01\x12\x0b\n\x07\x43_SHARP\x10\x02\x12\x06\n\x02GO\x10\x03\x12\x08\n\x04JAVA\x10\x04\x12\x0e\n\nJAVASCRIPT\x10\x05\x12\x07\n\x03PHP\x10\x06\x12\n\n\x06PYTHON\x10\x07\x12\x08\n\x04RUBY\x10\x08\x42\xee\x01\n3com.google.cloud.tools.snippetgen.configlanguage.v1B\x1aSnippetConfigLanguageProtoP\x01\xaa\x02/Google.Cloud.Tools.SnippetGen.ConfigLanguage.V1\xca\x02/Google\\Cloud\\Tools\\SnippetGen\\ConfigLanguage\\V1\xea\x02\x34Google::Cloud::Tools::SnippetGen::ConfigLanguage::V1b\x06proto3' +) _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) _builder.BuildTopDescriptorsAndMessages( - DESCRIPTOR, 'snippet_config_language_pb2', globals()) + DESCRIPTOR, "snippet_config_language_pb2", globals() +) if _descriptor._USE_C_DESCRIPTORS == False: - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n3com.google.cloud.tools.snippetgen.configlanguage.v1B\032SnippetConfigLanguageProtoP\001\252\002/Google.Cloud.Tools.SnippetGen.ConfigLanguage.V1\312\002/Google\\Cloud\\Tools\\SnippetGen\\ConfigLanguage\\V1\352\0024Google::Cloud::Tools::SnippetGen::ConfigLanguage::V1' + DESCRIPTOR._serialized_options = b"\n3com.google.cloud.tools.snippetgen.configlanguage.v1B\032SnippetConfigLanguageProtoP\001\252\002/Google.Cloud.Tools.SnippetGen.ConfigLanguage.V1\312\002/Google\\Cloud\\Tools\\SnippetGen\\ConfigLanguage\\V1\352\0024Google::Cloud::Tools::SnippetGen::ConfigLanguage::V1" _EXPRESSION_COMPLEXVALUE_PROPERTIESENTRY._options = None - _EXPRESSION_COMPLEXVALUE_PROPERTIESENTRY._serialized_options = b'8\001' + 
_EXPRESSION_COMPLEXVALUE_PROPERTIESENTRY._serialized_options = b"8\001" _GENERATOROUTPUTLANGUAGE._serialized_start = 13778 _GENERATOROUTPUTLANGUAGE._serialized_end = 13941 _SNIPPETCONFIG._serialized_start = 117 diff --git a/packages/gapic-generator/gapic/configurable_snippetgen/snippet_config_language_pb2.pyi b/packages/gapic-generator/gapic/configurable_snippetgen/snippet_config_language_pb2.pyi index fb61af2905cf..bf95a7ed532a 100644 --- a/packages/gapic-generator/gapic/configurable_snippetgen/snippet_config_language_pb2.pyi +++ b/packages/gapic-generator/gapic/configurable_snippetgen/snippet_config_language_pb2.pyi @@ -15,14 +15,16 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ + import builtins import collections.abc +import sys +import typing + import google.protobuf.descriptor import google.protobuf.internal.containers import google.protobuf.internal.enum_type_wrapper import google.protobuf.message -import sys -import typing if sys.version_info >= (3, 10): import typing as typing_extensions @@ -35,7 +37,12 @@ class _GeneratorOutputLanguage: ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType -class _GeneratorOutputLanguageEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_GeneratorOutputLanguage.ValueType], builtins.type): +class _GeneratorOutputLanguageEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ + _GeneratorOutputLanguage.ValueType + ], + builtins.type, +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor GENERATOR_OUTPUT_LANGUAGE_UNSPECIFIED: _GeneratorOutputLanguage.ValueType # 0 """The language has not been specified. 
Consumers should not see this value.""" @@ -48,7 +55,9 @@ class _GeneratorOutputLanguageEnumTypeWrapper(google.protobuf.internal.enum_type PYTHON: _GeneratorOutputLanguage.ValueType # 7 RUBY: _GeneratorOutputLanguage.ValueType # 8 -class GeneratorOutputLanguage(_GeneratorOutputLanguage, metaclass=_GeneratorOutputLanguageEnumTypeWrapper): +class GeneratorOutputLanguage( + _GeneratorOutputLanguage, metaclass=_GeneratorOutputLanguageEnumTypeWrapper +): """A programming language in which snippets are generated. Note that this is different from google.cloud.tools.snippetgen.snippetindex.v1.Language, i.e. language @@ -104,8 +113,32 @@ class SnippetConfig(google.protobuf.message.Message): signature: global___SnippetSignature | None = ..., snippet: global___Snippet | None = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["metadata", b"metadata", "rpc", b"rpc", "signature", b"signature", "snippet", b"snippet"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["metadata", b"metadata", "rpc", b"rpc", "signature", b"signature", "snippet", b"snippet"]) -> None: ... + def HasField( + self, + field_name: typing_extensions.Literal[ + "metadata", + b"metadata", + "rpc", + b"rpc", + "signature", + b"signature", + "snippet", + b"snippet", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "metadata", + b"metadata", + "rpc", + b"rpc", + "signature", + b"signature", + "snippet", + b"snippet", + ], + ) -> None: ... global___SnippetConfig = SnippetConfig @@ -128,7 +161,11 @@ class SnippetConfigMetadata(google.protobuf.message.Message): Defaults to false. 
""" @property - def skipped_languages(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___GeneratorOutputLanguage.ValueType]: + def skipped_languages( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[ + global___GeneratorOutputLanguage.ValueType + ]: """List of languages to skip snippet generation for. Config producers should specify here languages for which some of the capabilities required by the snippet are not supported. For instance, if a @@ -168,12 +205,29 @@ class SnippetConfigMetadata(google.protobuf.message.Message): self, *, skipped: builtins.bool = ..., - skipped_languages: collections.abc.Iterable[global___GeneratorOutputLanguage.ValueType] | None = ..., + skipped_languages: collections.abc.Iterable[ + global___GeneratorOutputLanguage.ValueType + ] + | None = ..., config_id: builtins.str = ..., snippet_name: builtins.str = ..., snippet_description: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["config_id", b"config_id", "skipped", b"skipped", "skipped_languages", b"skipped_languages", "snippet_description", b"snippet_description", "snippet_name", b"snippet_name"]) -> None: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "config_id", + b"config_id", + "skipped", + b"skipped", + "skipped_languages", + b"skipped_languages", + "snippet_description", + b"snippet_description", + "snippet_name", + b"snippet_name", + ], + ) -> None: ... global___SnippetConfigMetadata = SnippetConfigMetadata @@ -193,7 +247,9 @@ class Rpc(google.protobuf.message.Message): "google.cloud.translate.v3" the API name is "google.cloud.translate". """ @property - def api_version(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + def api_version( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: """The list of API versions for which one snippet is defined. 
The same RPC may exist in different versions (v1, v1beta, v2) of the API and may be covered by the same snippet config. @@ -212,7 +268,19 @@ class Rpc(google.protobuf.message.Message): service_name: builtins.str = ..., rpc_name: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["api_version", b"api_version", "proto_package", b"proto_package", "rpc_name", b"rpc_name", "service_name", b"service_name"]) -> None: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "api_version", + b"api_version", + "proto_package", + b"proto_package", + "rpc_name", + b"rpc_name", + "service_name", + b"service_name", + ], + ) -> None: ... global___Rpc = Rpc @@ -224,7 +292,12 @@ class SnippetSignature(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _SyncPreferenceEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[SnippetSignature._SyncPreference.ValueType], builtins.type): # noqa: F821 + class _SyncPreferenceEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ + SnippetSignature._SyncPreference.ValueType + ], + builtins.type, + ): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor LANGUAGE_PREFERRED: SnippetSignature._SyncPreference.ValueType # 0 """Generate an async, sync or both snippets as per language @@ -291,7 +364,11 @@ class SnippetSignature(google.protobuf.message.Message): sync_preference: global___SnippetSignature.SyncPreference.ValueType """Synchronous preference indicator for the generated snippet.""" @property - def parameters(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Statement.Declaration]: + def parameters( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___Statement.Declaration + ]: """The list of parameters that the snippet will receive. 
May be empty if the snippet method does not receive parameters. If any, parameters should appear in generated code in the same order as they appear in this field. @@ -305,10 +382,25 @@ class SnippetSignature(google.protobuf.message.Message): snippet_method_name: builtins.str = ..., return_type: global___Type | None = ..., sync_preference: global___SnippetSignature.SyncPreference.ValueType = ..., - parameters: collections.abc.Iterable[global___Statement.Declaration] | None = ..., + parameters: collections.abc.Iterable[global___Statement.Declaration] + | None = ..., + ) -> None: ... + def HasField( + self, field_name: typing_extensions.Literal["return_type", b"return_type"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "parameters", + b"parameters", + "return_type", + b"return_type", + "snippet_method_name", + b"snippet_method_name", + "sync_preference", + b"sync_preference", + ], ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["return_type", b"return_type"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["parameters", b"parameters", "return_type", b"return_type", "snippet_method_name", b"snippet_method_name", "sync_preference", b"sync_preference"]) -> None: ... 
global___SnippetSignature = SnippetSignature @@ -343,7 +435,12 @@ class Snippet(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _ServiceEndpointSchemaEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Snippet.ClientInitialization.ServiceEndpoint._ServiceEndpointSchema.ValueType], builtins.type): # noqa: F821 + class _ServiceEndpointSchemaEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ + Snippet.ClientInitialization.ServiceEndpoint._ServiceEndpointSchema.ValueType + ], + builtins.type, + ): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor LANGUAGE_DEFAULT: Snippet.ClientInitialization.ServiceEndpoint._ServiceEndpointSchema.ValueType # 0 """Each language to use their own default. @@ -357,7 +454,9 @@ class Snippet(google.protobuf.message.Message): HTTP: Snippet.ClientInitialization.ServiceEndpoint._ServiceEndpointSchema.ValueType # 2 """Use HTTP for service endpoint schema.""" - class ServiceEndpointSchema(_ServiceEndpointSchema, metaclass=_ServiceEndpointSchemaEnumTypeWrapper): + class ServiceEndpointSchema( + _ServiceEndpointSchema, metaclass=_ServiceEndpointSchemaEnumTypeWrapper + ): """Schemas for the service endpoint.""" LANGUAGE_DEFAULT: Snippet.ClientInitialization.ServiceEndpoint.ServiceEndpointSchema.ValueType # 0 @@ -401,12 +500,28 @@ class Snippet(google.protobuf.message.Message): region: builtins.str = ..., port: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["host", b"host", "port", b"port", "region", b"region", "schema", b"schema"]) -> None: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "host", + b"host", + "port", + b"port", + "region", + b"region", + "schema", + b"schema", + ], + ) -> None: ... 
PRE_CLIENT_INITIALIZATION_FIELD_NUMBER: builtins.int CUSTOM_SERVICE_ENDPOINT_FIELD_NUMBER: builtins.int @property - def pre_client_initialization(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Statement]: + def pre_client_initialization( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___Statement + ]: """Statements to be executed before the service client is initialized. For instance, some Statement.Declaration statements of variables to be used in service client initialization. May be empty. If any, statements will @@ -414,16 +529,33 @@ class Snippet(google.protobuf.message.Message): pre_request_initialization. """ @property - def custom_service_endpoint(self) -> global___Snippet.ClientInitialization.ServiceEndpoint: + def custom_service_endpoint( + self, + ) -> global___Snippet.ClientInitialization.ServiceEndpoint: """Custom endpoint to use in client initialization. Optional.""" def __init__( self, *, - pre_client_initialization: collections.abc.Iterable[global___Statement] | None = ..., - custom_service_endpoint: global___Snippet.ClientInitialization.ServiceEndpoint | None = ..., + pre_client_initialization: collections.abc.Iterable[global___Statement] + | None = ..., + custom_service_endpoint: global___Snippet.ClientInitialization.ServiceEndpoint + | None = ..., + ) -> None: ... + def HasField( + self, + field_name: typing_extensions.Literal[ + "custom_service_endpoint", b"custom_service_endpoint" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "custom_service_endpoint", + b"custom_service_endpoint", + "pre_client_initialization", + b"pre_client_initialization", + ], ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["custom_service_endpoint", b"custom_service_endpoint"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing_extensions.Literal["custom_service_endpoint", b"custom_service_endpoint", "pre_client_initialization", b"pre_client_initialization"]) -> None: ... @typing_extensions.final class Standard(google.protobuf.message.Message): @@ -435,7 +567,9 @@ class Snippet(google.protobuf.message.Message): CALL_FIELD_NUMBER: builtins.int RESPONSE_HANDLING_FIELD_NUMBER: builtins.int @property - def request_initialization(self) -> global___Snippet.SimpleRequestInitialization: + def request_initialization( + self, + ) -> global___Snippet.SimpleRequestInitialization: """Describes how to initialize the request object. Required.""" @property def call(self) -> global___Snippet.ClientCall: @@ -446,12 +580,33 @@ class Snippet(google.protobuf.message.Message): def __init__( self, *, - request_initialization: global___Snippet.SimpleRequestInitialization | None = ..., + request_initialization: global___Snippet.SimpleRequestInitialization + | None = ..., call: global___Snippet.ClientCall | None = ..., response_handling: global___Snippet.SimpleResponseHandling | None = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["call", b"call", "request_initialization", b"request_initialization", "response_handling", b"response_handling"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["call", b"call", "request_initialization", b"request_initialization", "response_handling", b"response_handling"]) -> None: ... + def HasField( + self, + field_name: typing_extensions.Literal[ + "call", + b"call", + "request_initialization", + b"request_initialization", + "response_handling", + b"response_handling", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "call", + b"call", + "request_initialization", + b"request_initialization", + "response_handling", + b"response_handling", + ], + ) -> None: ... 
@typing_extensions.final class Paginated(google.protobuf.message.Message): @@ -463,7 +618,9 @@ class Snippet(google.protobuf.message.Message): PAGINATED_CALL_FIELD_NUMBER: builtins.int PAGINATED_HANDLING_FIELD_NUMBER: builtins.int @property - def request_initialization(self) -> global___Snippet.SimpleRequestInitialization: + def request_initialization( + self, + ) -> global___Snippet.SimpleRequestInitialization: """Describes how to initialize the request object. This is the request object used for the initial paginated RPC call. Depending on how the response should be handled subsequent requests will be initialized either @@ -482,12 +639,33 @@ class Snippet(google.protobuf.message.Message): def __init__( self, *, - request_initialization: global___Snippet.SimpleRequestInitialization | None = ..., + request_initialization: global___Snippet.SimpleRequestInitialization + | None = ..., paginated_call: global___Snippet.ClientCall | None = ..., paginated_handling: global___Snippet.PaginatedResponseHandling | None = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["paginated_call", b"paginated_call", "paginated_handling", b"paginated_handling", "request_initialization", b"request_initialization"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["paginated_call", b"paginated_call", "paginated_handling", b"paginated_handling", "request_initialization", b"request_initialization"]) -> None: ... + def HasField( + self, + field_name: typing_extensions.Literal[ + "paginated_call", + b"paginated_call", + "paginated_handling", + b"paginated_handling", + "request_initialization", + b"request_initialization", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "paginated_call", + b"paginated_call", + "paginated_handling", + b"paginated_handling", + "request_initialization", + b"request_initialization", + ], + ) -> None: ... 
@typing_extensions.final class Lro(google.protobuf.message.Message): @@ -499,7 +677,9 @@ class Snippet(google.protobuf.message.Message): CALL_FIELD_NUMBER: builtins.int LRO_HANDLING_FIELD_NUMBER: builtins.int @property - def request_initialization(self) -> global___Snippet.SimpleRequestInitialization: + def request_initialization( + self, + ) -> global___Snippet.SimpleRequestInitialization: """Describes how to initialize the request object. Required. This describes the request to the LRO operation itself and not to polling operations. @@ -516,12 +696,33 @@ class Snippet(google.protobuf.message.Message): def __init__( self, *, - request_initialization: global___Snippet.SimpleRequestInitialization | None = ..., + request_initialization: global___Snippet.SimpleRequestInitialization + | None = ..., call: global___Snippet.ClientCall | None = ..., lro_handling: global___Snippet.LroResponseHandling | None = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["call", b"call", "lro_handling", b"lro_handling", "request_initialization", b"request_initialization"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["call", b"call", "lro_handling", b"lro_handling", "request_initialization", b"request_initialization"]) -> None: ... + def HasField( + self, + field_name: typing_extensions.Literal[ + "call", + b"call", + "lro_handling", + b"lro_handling", + "request_initialization", + b"request_initialization", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "call", + b"call", + "lro_handling", + b"lro_handling", + "request_initialization", + b"request_initialization", + ], + ) -> None: ... @typing_extensions.final class ClientStreaming(google.protobuf.message.Message): @@ -543,7 +744,9 @@ class Snippet(google.protobuf.message.Message): in which streaming requests are written. Required. 
""" @property - def request_initialization(self) -> global___Snippet.StreamingRequestInitialization: + def request_initialization( + self, + ) -> global___Snippet.StreamingRequestInitialization: """Describes how to initialize the streaming request objects. Required.""" @property def response_handling(self) -> global___Snippet.SimpleResponseHandling: @@ -556,11 +759,34 @@ class Snippet(google.protobuf.message.Message): *, initialization_call: global___Snippet.ClientCall | None = ..., client_stream_name: builtins.str = ..., - request_initialization: global___Snippet.StreamingRequestInitialization | None = ..., + request_initialization: global___Snippet.StreamingRequestInitialization + | None = ..., response_handling: global___Snippet.SimpleResponseHandling | None = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["initialization_call", b"initialization_call", "request_initialization", b"request_initialization", "response_handling", b"response_handling"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["client_stream_name", b"client_stream_name", "initialization_call", b"initialization_call", "request_initialization", b"request_initialization", "response_handling", b"response_handling"]) -> None: ... + def HasField( + self, + field_name: typing_extensions.Literal[ + "initialization_call", + b"initialization_call", + "request_initialization", + b"request_initialization", + "response_handling", + b"response_handling", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "client_stream_name", + b"client_stream_name", + "initialization_call", + b"initialization_call", + "request_initialization", + b"request_initialization", + "response_handling", + b"response_handling", + ], + ) -> None: ... 
@typing_extensions.final class ServerStreaming(google.protobuf.message.Message): @@ -573,7 +799,9 @@ class Snippet(google.protobuf.message.Message): SERVER_STREAM_NAME_FIELD_NUMBER: builtins.int RESPONSE_HANDLING_FIELD_NUMBER: builtins.int @property - def request_initialization(self) -> global___Snippet.SimpleRequestInitialization: + def request_initialization( + self, + ) -> global___Snippet.SimpleRequestInitialization: """Describes how to initialize the request object. Required.""" @property def initialization_call(self) -> global___Snippet.ClientCall: @@ -591,13 +819,36 @@ class Snippet(google.protobuf.message.Message): def __init__( self, *, - request_initialization: global___Snippet.SimpleRequestInitialization | None = ..., + request_initialization: global___Snippet.SimpleRequestInitialization + | None = ..., initialization_call: global___Snippet.ClientCall | None = ..., server_stream_name: builtins.str = ..., response_handling: global___Snippet.StreamingResponseHandling | None = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["initialization_call", b"initialization_call", "request_initialization", b"request_initialization", "response_handling", b"response_handling"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["initialization_call", b"initialization_call", "request_initialization", b"request_initialization", "response_handling", b"response_handling", "server_stream_name", b"server_stream_name"]) -> None: ... + def HasField( + self, + field_name: typing_extensions.Literal[ + "initialization_call", + b"initialization_call", + "request_initialization", + b"request_initialization", + "response_handling", + b"response_handling", + ], + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing_extensions.Literal[ + "initialization_call", + b"initialization_call", + "request_initialization", + b"request_initialization", + "response_handling", + b"response_handling", + "server_stream_name", + b"server_stream_name", + ], + ) -> None: ... @typing_extensions.final class BidiStreaming(google.protobuf.message.Message): @@ -620,7 +871,9 @@ class Snippet(google.protobuf.message.Message): in which streaming requests are written. Required. """ @property - def request_initialization(self) -> global___Snippet.StreamingRequestInitialization: + def request_initialization( + self, + ) -> global___Snippet.StreamingRequestInitialization: """Describes how to initialize the streaming request objects. Required.""" server_stream_name: builtins.str """The name of the variable to capture the server stream in, i.e. the object @@ -637,12 +890,37 @@ class Snippet(google.protobuf.message.Message): *, initialization_call: global___Snippet.ClientCall | None = ..., client_stream_name: builtins.str = ..., - request_initialization: global___Snippet.StreamingRequestInitialization | None = ..., + request_initialization: global___Snippet.StreamingRequestInitialization + | None = ..., server_stream_name: builtins.str = ..., response_handling: global___Snippet.StreamingResponseHandling | None = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["initialization_call", b"initialization_call", "request_initialization", b"request_initialization", "response_handling", b"response_handling"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["client_stream_name", b"client_stream_name", "initialization_call", b"initialization_call", "request_initialization", b"request_initialization", "response_handling", b"response_handling", "server_stream_name", b"server_stream_name"]) -> None: ... 
+ def HasField( + self, + field_name: typing_extensions.Literal[ + "initialization_call", + b"initialization_call", + "request_initialization", + b"request_initialization", + "response_handling", + b"response_handling", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "client_stream_name", + b"client_stream_name", + "initialization_call", + b"initialization_call", + "request_initialization", + b"request_initialization", + "response_handling", + b"response_handling", + "server_stream_name", + b"server_stream_name", + ], + ) -> None: ... @typing_extensions.final class ClientCall(google.protobuf.message.Message): @@ -655,7 +933,11 @@ class Snippet(google.protobuf.message.Message): PRE_CALL_FIELD_NUMBER: builtins.int @property - def pre_call(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Statement]: + def pre_call( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___Statement + ]: """Statements to be executed before the initial call to the service client method is made. Whether that results in an RPC call or not is operation type and language dependent. May be empty. @@ -667,7 +949,9 @@ class Snippet(google.protobuf.message.Message): *, pre_call: collections.abc.Iterable[global___Statement] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["pre_call", b"pre_call"]) -> None: ... + def ClearField( + self, field_name: typing_extensions.Literal["pre_call", b"pre_call"] + ) -> None: ... 
@typing_extensions.final class SimpleRequestInitialization(google.protobuf.message.Message): @@ -679,7 +963,11 @@ class Snippet(google.protobuf.message.Message): REQUEST_VALUE_FIELD_NUMBER: builtins.int REQUEST_NAME_FIELD_NUMBER: builtins.int @property - def pre_request_initialization(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Statement]: + def pre_request_initialization( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___Statement + ]: """Statements to be executed before the request object is initialized. For instance, some Statement.Declaration statements of variables to be used in request initialization. May be empty. If any, statements will be @@ -698,12 +986,26 @@ class Snippet(google.protobuf.message.Message): def __init__( self, *, - pre_request_initialization: collections.abc.Iterable[global___Statement] | None = ..., + pre_request_initialization: collections.abc.Iterable[global___Statement] + | None = ..., request_value: global___Expression | None = ..., request_name: builtins.str = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["request_value", b"request_value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["pre_request_initialization", b"pre_request_initialization", "request_name", b"request_name", "request_value", b"request_value"]) -> None: ... + def HasField( + self, + field_name: typing_extensions.Literal["request_value", b"request_value"], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "pre_request_initialization", + b"pre_request_initialization", + "request_name", + b"request_name", + "request_value", + b"request_value", + ], + ) -> None: ... 
@typing_extensions.final class StreamingRequestInitialization(google.protobuf.message.Message): @@ -719,7 +1021,9 @@ class Snippet(google.protobuf.message.Message): ITERATION_FIELD_NUMBER: builtins.int STREAMING_REQUEST_FIELD_NUMBER: builtins.int @property - def first_streaming_request(self) -> global___Snippet.SimpleRequestInitialization: + def first_streaming_request( + self, + ) -> global___Snippet.SimpleRequestInitialization: """Describes how to initialize the first streaming request. Optional for operations that do not require a specific first request. """ @@ -737,12 +1041,34 @@ class Snippet(google.protobuf.message.Message): def __init__( self, *, - first_streaming_request: global___Snippet.SimpleRequestInitialization | None = ..., + first_streaming_request: global___Snippet.SimpleRequestInitialization + | None = ..., iteration: global___Statement.Iteration | None = ..., - streaming_request: global___Snippet.SimpleRequestInitialization | None = ..., + streaming_request: global___Snippet.SimpleRequestInitialization + | None = ..., + ) -> None: ... + def HasField( + self, + field_name: typing_extensions.Literal[ + "first_streaming_request", + b"first_streaming_request", + "iteration", + b"iteration", + "streaming_request", + b"streaming_request", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "first_streaming_request", + b"first_streaming_request", + "iteration", + b"iteration", + "streaming_request", + b"streaming_request", + ], ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["first_streaming_request", b"first_streaming_request", "iteration", b"iteration", "streaming_request", b"streaming_request"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["first_streaming_request", b"first_streaming_request", "iteration", b"iteration", "streaming_request", b"streaming_request"]) -> None: ... 
@typing_extensions.final class SimpleResponseHandling(google.protobuf.message.Message): @@ -760,7 +1086,10 @@ class Snippet(google.protobuf.message.Message): *, response_name: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["response_name", b"response_name"]) -> None: ... + def ClearField( + self, + field_name: typing_extensions.Literal["response_name", b"response_name"], + ) -> None: ... @typing_extensions.final class PaginatedResponseHandling(google.protobuf.message.Message): @@ -779,15 +1108,28 @@ class Snippet(google.protobuf.message.Message): item_name: builtins.str """The name of the variable to capture the current item in. Required.""" @property - def per_item_statements(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Statement]: + def per_item_statements( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___Statement + ]: """Statements to execute for each item. Optional.""" def __init__( self, *, item_name: builtins.str = ..., - per_item_statements: collections.abc.Iterable[global___Statement] | None = ..., + per_item_statements: collections.abc.Iterable[global___Statement] + | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "item_name", + b"item_name", + "per_item_statements", + b"per_item_statements", + ], ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["item_name", b"item_name", "per_item_statements", b"per_item_statements"]) -> None: ... @typing_extensions.final class ByPage(google.protobuf.message.Message): @@ -801,7 +1143,11 @@ class Snippet(google.protobuf.message.Message): page_name: builtins.str """The name of the variable to capture the current page in. 
Required.""" @property - def per_page_statements(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Statement]: + def per_page_statements( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___Statement + ]: """Statements to execute for each page. Optional.""" @property def by_item(self) -> global___Snippet.PaginatedResponseHandling.ByItem: @@ -810,11 +1156,24 @@ class Snippet(google.protobuf.message.Message): self, *, page_name: builtins.str = ..., - per_page_statements: collections.abc.Iterable[global___Statement] | None = ..., + per_page_statements: collections.abc.Iterable[global___Statement] + | None = ..., by_item: global___Snippet.PaginatedResponseHandling.ByItem | None = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["by_item", b"by_item"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["by_item", b"by_item", "page_name", b"page_name", "per_page_statements", b"per_page_statements"]) -> None: ... + def HasField( + self, field_name: typing_extensions.Literal["by_item", b"by_item"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "by_item", + b"by_item", + "page_name", + b"page_name", + "per_page_statements", + b"per_page_statements", + ], + ) -> None: ... @typing_extensions.final class NextPageToken(google.protobuf.message.Message): @@ -846,8 +1205,23 @@ class Snippet(google.protobuf.message.Message): explicit_page_size: global___Expression | None = ..., by_page: global___Snippet.PaginatedResponseHandling.ByPage | None = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["by_page", b"by_page", "explicit_page_size", b"explicit_page_size"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing_extensions.Literal["by_page", b"by_page", "explicit_page_size", b"explicit_page_size", "next_page_token_name", b"next_page_token_name"]) -> None: ... + def HasField( + self, + field_name: typing_extensions.Literal[ + "by_page", b"by_page", "explicit_page_size", b"explicit_page_size" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "by_page", + b"by_page", + "explicit_page_size", + b"explicit_page_size", + "next_page_token_name", + b"next_page_token_name", + ], + ) -> None: ... RESPONSE_NAME_FIELD_NUMBER: builtins.int BY_ITEM_FIELD_NUMBER: builtins.int @@ -869,7 +1243,9 @@ class Snippet(google.protobuf.message.Message): needed. """ @property - def next_page_token(self) -> global___Snippet.PaginatedResponseHandling.NextPageToken: + def next_page_token( + self, + ) -> global___Snippet.PaginatedResponseHandling.NextPageToken: """Iterate page by page, explicitly using the next page token. This pagination mode will modify the original request by subsequently setting the next page token obtained from the previous response. @@ -880,11 +1256,45 @@ class Snippet(google.protobuf.message.Message): response_name: builtins.str = ..., by_item: global___Snippet.PaginatedResponseHandling.ByItem | None = ..., by_page: global___Snippet.PaginatedResponseHandling.ByPage | None = ..., - next_page_token: global___Snippet.PaginatedResponseHandling.NextPageToken | None = ..., + next_page_token: global___Snippet.PaginatedResponseHandling.NextPageToken + | None = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["by_item", b"by_item", "by_page", b"by_page", "next_page_token", b"next_page_token", "pagination_kind", b"pagination_kind"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing_extensions.Literal["by_item", b"by_item", "by_page", b"by_page", "next_page_token", b"next_page_token", "pagination_kind", b"pagination_kind", "response_name", b"response_name"]) -> None: ... - def WhichOneof(self, oneof_group: typing_extensions.Literal["pagination_kind", b"pagination_kind"]) -> typing_extensions.Literal["by_item", "by_page", "next_page_token"] | None: ... + def HasField( + self, + field_name: typing_extensions.Literal[ + "by_item", + b"by_item", + "by_page", + b"by_page", + "next_page_token", + b"next_page_token", + "pagination_kind", + b"pagination_kind", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "by_item", + b"by_item", + "by_page", + b"by_page", + "next_page_token", + b"next_page_token", + "pagination_kind", + b"pagination_kind", + "response_name", + b"response_name", + ], + ) -> None: ... + def WhichOneof( + self, + oneof_group: typing_extensions.Literal[ + "pagination_kind", b"pagination_kind" + ], + ) -> ( + typing_extensions.Literal["by_item", "by_page", "next_page_token"] | None + ): ... @typing_extensions.final class LroResponseHandling(google.protobuf.message.Message): @@ -896,7 +1306,12 @@ class Snippet(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _PollingTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Snippet.LroResponseHandling._PollingType.ValueType], builtins.type): # noqa: F821 + class _PollingTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ + Snippet.LroResponseHandling._PollingType.ValueType + ], + builtins.type, + ): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor UNTIL_COMPLETION: Snippet.LroResponseHandling._PollingType.ValueType # 0 """Poll until completion. 
Default value.""" @@ -942,8 +1357,22 @@ class Snippet(google.protobuf.message.Message): polling_response_name: builtins.str = ..., polling_call: global___Snippet.ClientCall | None = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["polling_call", b"polling_call"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["polling_call", b"polling_call", "polling_response_name", b"polling_response_name", "polling_type", b"polling_type", "response_name", b"response_name"]) -> None: ... + def HasField( + self, field_name: typing_extensions.Literal["polling_call", b"polling_call"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "polling_call", + b"polling_call", + "polling_response_name", + b"polling_response_name", + "polling_type", + b"polling_type", + "response_name", + b"response_name", + ], + ) -> None: ... @typing_extensions.final class StreamingResponseHandling(google.protobuf.message.Message): @@ -958,15 +1387,28 @@ class Snippet(google.protobuf.message.Message): Required. """ @property - def per_stream_response_statements(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Statement]: + def per_stream_response_statements( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___Statement + ]: """Statements to execute for each stream response. Optional.""" def __init__( self, *, current_response_name: builtins.str = ..., - per_stream_response_statements: collections.abc.Iterable[global___Statement] | None = ..., + per_stream_response_statements: collections.abc.Iterable[global___Statement] + | None = ..., + ) -> None: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "current_response_name", + b"current_response_name", + "per_stream_response_statements", + b"per_stream_response_statements", + ], ) -> None: ... 
- def ClearField(self, field_name: typing_extensions.Literal["current_response_name", b"current_response_name", "per_stream_response_statements", b"per_stream_response_statements"]) -> None: ... SERVICE_CLIENT_INITIALIZATION_FIELD_NUMBER: builtins.int STANDARD_FIELD_NUMBER: builtins.int @@ -1000,7 +1442,11 @@ class Snippet(google.protobuf.message.Message): def bidi_streaming(self) -> global___Snippet.BidiStreaming: """A bidirectional streaming RPC operation.""" @property - def final_statements(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Statement]: + def final_statements( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___Statement + ]: """Statements to be executed before the snippet ends. For instance, some Statement.StandardOutput statements and the Statement.Return statement. May be empty. If any, statements should appear on generated code in the @@ -1009,7 +1455,8 @@ class Snippet(google.protobuf.message.Message): def __init__( self, *, - service_client_initialization: global___Snippet.ClientInitialization | None = ..., + service_client_initialization: global___Snippet.ClientInitialization + | None = ..., standard: global___Snippet.Standard | None = ..., paginated: global___Snippet.Paginated | None = ..., lro: global___Snippet.Lro | None = ..., @@ -1018,9 +1465,63 @@ class Snippet(google.protobuf.message.Message): bidi_streaming: global___Snippet.BidiStreaming | None = ..., final_statements: collections.abc.Iterable[global___Statement] | None = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["bidi_streaming", b"bidi_streaming", "call", b"call", "client_streaming", b"client_streaming", "lro", b"lro", "paginated", b"paginated", "server_streaming", b"server_streaming", "service_client_initialization", b"service_client_initialization", "standard", b"standard"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing_extensions.Literal["bidi_streaming", b"bidi_streaming", "call", b"call", "client_streaming", b"client_streaming", "final_statements", b"final_statements", "lro", b"lro", "paginated", b"paginated", "server_streaming", b"server_streaming", "service_client_initialization", b"service_client_initialization", "standard", b"standard"]) -> None: ... - def WhichOneof(self, oneof_group: typing_extensions.Literal["call", b"call"]) -> typing_extensions.Literal["standard", "paginated", "lro", "client_streaming", "server_streaming", "bidi_streaming"] | None: ... + def HasField( + self, + field_name: typing_extensions.Literal[ + "bidi_streaming", + b"bidi_streaming", + "call", + b"call", + "client_streaming", + b"client_streaming", + "lro", + b"lro", + "paginated", + b"paginated", + "server_streaming", + b"server_streaming", + "service_client_initialization", + b"service_client_initialization", + "standard", + b"standard", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "bidi_streaming", + b"bidi_streaming", + "call", + b"call", + "client_streaming", + b"client_streaming", + "final_statements", + b"final_statements", + "lro", + b"lro", + "paginated", + b"paginated", + "server_streaming", + b"server_streaming", + "service_client_initialization", + b"service_client_initialization", + "standard", + b"standard", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing_extensions.Literal["call", b"call"] + ) -> ( + typing_extensions.Literal[ + "standard", + "paginated", + "lro", + "client_streaming", + "server_streaming", + "bidi_streaming", + ] + | None + ): ... global___Snippet = Snippet @@ -1069,8 +1570,23 @@ class Statement(google.protobuf.message.Message): value: global___Expression | None = ..., description: builtins.str = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["type", b"type", "value", b"value"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing_extensions.Literal["description", b"description", "name", b"name", "type", b"type", "value", b"value"]) -> None: ... + def HasField( + self, + field_name: typing_extensions.Literal["type", b"type", "value", b"value"], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "description", + b"description", + "name", + b"name", + "type", + b"type", + "value", + b"value", + ], + ) -> None: ... @typing_extensions.final class StandardOutput(google.protobuf.message.Message): @@ -1092,8 +1608,12 @@ class Statement(google.protobuf.message.Message): *, value: global___Expression | None = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["value", b"value"]) -> None: ... + def HasField( + self, field_name: typing_extensions.Literal["value", b"value"] + ) -> builtins.bool: ... + def ClearField( + self, field_name: typing_extensions.Literal["value", b"value"] + ) -> None: ... @typing_extensions.final class Return(google.protobuf.message.Message): @@ -1112,8 +1632,12 @@ class Statement(google.protobuf.message.Message): *, result: global___Expression | None = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["result", b"result"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["result", b"result"]) -> None: ... + def HasField( + self, field_name: typing_extensions.Literal["result", b"result"] + ) -> builtins.bool: ... + def ClearField( + self, field_name: typing_extensions.Literal["result", b"result"] + ) -> None: ... @typing_extensions.final class Conditional(google.protobuf.message.Message): @@ -1130,12 +1654,20 @@ class Statement(google.protobuf.message.Message): def condition(self) -> global___Expression: """The condition to evaluate. 
Should evaluate to a bolean value.""" @property - def on_true(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Statement]: + def on_true( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___Statement + ]: """The set of statements to execute if condition evaluates to true. The statements should be executed in the order that they appear. """ @property - def on_false(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Statement]: + def on_false( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___Statement + ]: """The set of statements to execute if condition evaluates to false. The statements should be executed in the order that they appear. """ @@ -1146,8 +1678,20 @@ class Statement(google.protobuf.message.Message): on_true: collections.abc.Iterable[global___Statement] | None = ..., on_false: collections.abc.Iterable[global___Statement] | None = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["condition", b"condition"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["condition", b"condition", "on_false", b"on_false", "on_true", b"on_true"]) -> None: ... + def HasField( + self, field_name: typing_extensions.Literal["condition", b"condition"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "condition", + b"condition", + "on_false", + b"on_false", + "on_true", + b"on_true", + ], + ) -> None: ... @typing_extensions.final class Iteration(google.protobuf.message.Message): @@ -1234,12 +1778,73 @@ class Statement(google.protobuf.message.Message): greater_than: global___Expression | None = ..., total_steps: global___Expression | None = ..., ) -> None: ... 
- def HasField(self, field_name: typing_extensions.Literal["end", b"end", "greater_than", b"greater_than", "greater_than_or_equal", b"greater_than_or_equal", "increment", b"increment", "less_than", b"less_than", "less_than_or_equal", b"less_than_or_equal", "multiplier", b"multiplier", "start_at", b"start_at", "step", b"step", "total_steps", b"total_steps"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["end", b"end", "greater_than", b"greater_than", "greater_than_or_equal", b"greater_than_or_equal", "increment", b"increment", "less_than", b"less_than", "less_than_or_equal", b"less_than_or_equal", "multiplier", b"multiplier", "start_at", b"start_at", "step", b"step", "total_steps", b"total_steps"]) -> None: ... + def HasField( + self, + field_name: typing_extensions.Literal[ + "end", + b"end", + "greater_than", + b"greater_than", + "greater_than_or_equal", + b"greater_than_or_equal", + "increment", + b"increment", + "less_than", + b"less_than", + "less_than_or_equal", + b"less_than_or_equal", + "multiplier", + b"multiplier", + "start_at", + b"start_at", + "step", + b"step", + "total_steps", + b"total_steps", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "end", + b"end", + "greater_than", + b"greater_than", + "greater_than_or_equal", + b"greater_than_or_equal", + "increment", + b"increment", + "less_than", + b"less_than", + "less_than_or_equal", + b"less_than_or_equal", + "multiplier", + b"multiplier", + "start_at", + b"start_at", + "step", + b"step", + "total_steps", + b"total_steps", + ], + ) -> None: ... @typing.overload - def WhichOneof(self, oneof_group: typing_extensions.Literal["end", b"end"]) -> typing_extensions.Literal["less_than_or_equal", "less_than", "greater_than_or_equal", "greater_than", "total_steps"] | None: ... 
+ def WhichOneof( + self, oneof_group: typing_extensions.Literal["end", b"end"] + ) -> ( + typing_extensions.Literal[ + "less_than_or_equal", + "less_than", + "greater_than_or_equal", + "greater_than", + "total_steps", + ] + | None + ): ... @typing.overload - def WhichOneof(self, oneof_group: typing_extensions.Literal["step", b"step"]) -> typing_extensions.Literal["increment", "multiplier"] | None: ... + def WhichOneof( + self, oneof_group: typing_extensions.Literal["step", b"step"] + ) -> typing_extensions.Literal["increment", "multiplier"] | None: ... @typing_extensions.final class RepeatedIteration(google.protobuf.message.Message): @@ -1271,8 +1876,21 @@ class Statement(google.protobuf.message.Message): repeated_elements: global___Statement.Declaration | None = ..., current_name: builtins.str = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["repeated_elements", b"repeated_elements"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["current_name", b"current_name", "repeated_elements", b"repeated_elements"]) -> None: ... + def HasField( + self, + field_name: typing_extensions.Literal[ + "repeated_elements", b"repeated_elements" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "current_name", + b"current_name", + "repeated_elements", + b"repeated_elements", + ], + ) -> None: ... @typing_extensions.final class MapIteration(google.protobuf.message.Message): @@ -1311,8 +1929,20 @@ class Statement(google.protobuf.message.Message): current_key_name: builtins.str = ..., current_value_name: builtins.str = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["map", b"map"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["current_key_name", b"current_key_name", "current_value_name", b"current_value_name", "map", b"map"]) -> None: ... 
+ def HasField( + self, field_name: typing_extensions.Literal["map", b"map"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "current_key_name", + b"current_key_name", + "current_value_name", + b"current_value_name", + "map", + b"map", + ], + ) -> None: ... @typing_extensions.final class BytesIteration(google.protobuf.message.Message): @@ -1365,9 +1995,41 @@ class Statement(google.protobuf.message.Message): chunk_type: global___Type.BytesType | None = ..., current_name: builtins.str = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["byte_sequence", b"byte_sequence", "chunk", b"chunk", "chunk_size", b"chunk_size", "chunk_type", b"chunk_type", "total_chunks", b"total_chunks"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["byte_sequence", b"byte_sequence", "chunk", b"chunk", "chunk_size", b"chunk_size", "chunk_type", b"chunk_type", "current_name", b"current_name", "total_chunks", b"total_chunks"]) -> None: ... - def WhichOneof(self, oneof_group: typing_extensions.Literal["chunk", b"chunk"]) -> typing_extensions.Literal["chunk_size", "total_chunks"] | None: ... + def HasField( + self, + field_name: typing_extensions.Literal[ + "byte_sequence", + b"byte_sequence", + "chunk", + b"chunk", + "chunk_size", + b"chunk_size", + "chunk_type", + b"chunk_type", + "total_chunks", + b"total_chunks", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "byte_sequence", + b"byte_sequence", + "chunk", + b"chunk", + "chunk_size", + b"chunk_size", + "chunk_type", + b"chunk_type", + "current_name", + b"current_name", + "total_chunks", + b"total_chunks", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing_extensions.Literal["chunk", b"chunk"] + ) -> typing_extensions.Literal["chunk_size", "total_chunks"] | None: ... 
NUMERIC_SEQUENCE_ITERATION_FIELD_NUMBER: builtins.int REPEATED_ITERATION_FIELD_NUMBER: builtins.int @@ -1375,7 +2037,9 @@ class Statement(google.protobuf.message.Message): BYTES_ITERATION_FIELD_NUMBER: builtins.int STATEMENTS_FIELD_NUMBER: builtins.int @property - def numeric_sequence_iteration(self) -> global___Statement.Iteration.NumericSequenceIteration: + def numeric_sequence_iteration( + self, + ) -> global___Statement.Iteration.NumericSequenceIteration: """A numeric sequence iteration.""" @property def repeated_iteration(self) -> global___Statement.Iteration.RepeatedIteration: @@ -1387,7 +2051,11 @@ class Statement(google.protobuf.message.Message): def bytes_iteration(self) -> global___Statement.Iteration.BytesIteration: """A bytes sequence iteration.""" @property - def statements(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Statement]: + def statements( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___Statement + ]: """The set of statements to execute on each step of the iteration. The statements should be executed in the order that they appear. May be empty as iterations will also be used for streaming request @@ -1396,15 +2064,58 @@ class Statement(google.protobuf.message.Message): def __init__( self, *, - numeric_sequence_iteration: global___Statement.Iteration.NumericSequenceIteration | None = ..., - repeated_iteration: global___Statement.Iteration.RepeatedIteration | None = ..., + numeric_sequence_iteration: global___Statement.Iteration.NumericSequenceIteration + | None = ..., + repeated_iteration: global___Statement.Iteration.RepeatedIteration + | None = ..., map_iteration: global___Statement.Iteration.MapIteration | None = ..., bytes_iteration: global___Statement.Iteration.BytesIteration | None = ..., statements: collections.abc.Iterable[global___Statement] | None = ..., ) -> None: ... 
- def HasField(self, field_name: typing_extensions.Literal["bytes_iteration", b"bytes_iteration", "iteration_type", b"iteration_type", "map_iteration", b"map_iteration", "numeric_sequence_iteration", b"numeric_sequence_iteration", "repeated_iteration", b"repeated_iteration"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["bytes_iteration", b"bytes_iteration", "iteration_type", b"iteration_type", "map_iteration", b"map_iteration", "numeric_sequence_iteration", b"numeric_sequence_iteration", "repeated_iteration", b"repeated_iteration", "statements", b"statements"]) -> None: ... - def WhichOneof(self, oneof_group: typing_extensions.Literal["iteration_type", b"iteration_type"]) -> typing_extensions.Literal["numeric_sequence_iteration", "repeated_iteration", "map_iteration", "bytes_iteration"] | None: ... + def HasField( + self, + field_name: typing_extensions.Literal[ + "bytes_iteration", + b"bytes_iteration", + "iteration_type", + b"iteration_type", + "map_iteration", + b"map_iteration", + "numeric_sequence_iteration", + b"numeric_sequence_iteration", + "repeated_iteration", + b"repeated_iteration", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "bytes_iteration", + b"bytes_iteration", + "iteration_type", + b"iteration_type", + "map_iteration", + b"map_iteration", + "numeric_sequence_iteration", + b"numeric_sequence_iteration", + "repeated_iteration", + b"repeated_iteration", + "statements", + b"statements", + ], + ) -> None: ... + def WhichOneof( + self, + oneof_group: typing_extensions.Literal["iteration_type", b"iteration_type"], + ) -> ( + typing_extensions.Literal[ + "numeric_sequence_iteration", + "repeated_iteration", + "map_iteration", + "bytes_iteration", + ] + | None + ): ... 
DECLARATION_FIELD_NUMBER: builtins.int STANDARD_OUTPUT_FIELD_NUMBER: builtins.int @@ -1442,9 +2153,49 @@ class Statement(google.protobuf.message.Message): conditional: global___Statement.Conditional | None = ..., iteration: global___Statement.Iteration | None = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["conditional", b"conditional", "declaration", b"declaration", "iteration", b"iteration", "return", b"return", "standard_output", b"standard_output", "statement_type", b"statement_type"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["conditional", b"conditional", "declaration", b"declaration", "iteration", b"iteration", "return", b"return", "standard_output", b"standard_output", "statement_type", b"statement_type"]) -> None: ... - def WhichOneof(self, oneof_group: typing_extensions.Literal["statement_type", b"statement_type"]) -> typing_extensions.Literal["declaration", "standard_output", "return", "conditional", "iteration"] | None: ... + def HasField( + self, + field_name: typing_extensions.Literal[ + "conditional", + b"conditional", + "declaration", + b"declaration", + "iteration", + b"iteration", + "return", + b"return", + "standard_output", + b"standard_output", + "statement_type", + b"statement_type", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "conditional", + b"conditional", + "declaration", + b"declaration", + "iteration", + b"iteration", + "return", + b"return", + "standard_output", + b"standard_output", + "statement_type", + b"statement_type", + ], + ) -> None: ... + def WhichOneof( + self, + oneof_group: typing_extensions.Literal["statement_type", b"statement_type"], + ) -> ( + typing_extensions.Literal[ + "declaration", "standard_output", "return", "conditional", "iteration" + ] + | None + ): ... 
global___Statement = Statement @@ -1460,7 +2211,12 @@ class Type(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _ScalarTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Type._ScalarType.ValueType], builtins.type): # noqa: F821 + class _ScalarTypeEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ + Type._ScalarType.ValueType + ], + builtins.type, + ): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor SCALAR_TYPE_UNDEFINED: Type._ScalarType.ValueType # 0 """The scalar type has not been specified. Consumers should not see this @@ -1534,7 +2290,10 @@ class Type(google.protobuf.message.Message): *, enum_full_name: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["enum_full_name", b"enum_full_name"]) -> None: ... + def ClearField( + self, + field_name: typing_extensions.Literal["enum_full_name", b"enum_full_name"], + ) -> None: ... 
@typing_extensions.final class BytesType(google.protobuf.message.Message): @@ -1546,7 +2305,12 @@ class Type(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _LanguageEquivalentEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Type.BytesType._LanguageEquivalent.ValueType], builtins.type): # noqa: F821 + class _LanguageEquivalentEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ + Type.BytesType._LanguageEquivalent.ValueType + ], + builtins.type, + ): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor PROTOBUF_BYTES: Type.BytesType._LanguageEquivalent.ValueType # 0 """The same type used for representing protobuf bytes fields.""" @@ -1559,7 +2323,9 @@ class Type(google.protobuf.message.Message): STREAM: Type.BytesType._LanguageEquivalent.ValueType # 3 """Language-specific stream type.""" - class LanguageEquivalent(_LanguageEquivalent, metaclass=_LanguageEquivalentEnumTypeWrapper): + class LanguageEquivalent( + _LanguageEquivalent, metaclass=_LanguageEquivalentEnumTypeWrapper + ): """Possible language-specific equivalents to a bytes type.""" PROTOBUF_BYTES: Type.BytesType.LanguageEquivalent.ValueType # 0 @@ -1581,7 +2347,12 @@ class Type(google.protobuf.message.Message): *, language_equivalent: global___Type.BytesType.LanguageEquivalent.ValueType = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["language_equivalent", b"language_equivalent"]) -> None: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "language_equivalent", b"language_equivalent" + ], + ) -> None: ... @typing_extensions.final class MessageType(google.protobuf.message.Message): @@ -1600,7 +2371,12 @@ class Type(google.protobuf.message.Message): *, message_full_name: builtins.str = ..., ) -> None: ... 
- def ClearField(self, field_name: typing_extensions.Literal["message_full_name", b"message_full_name"]) -> None: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "message_full_name", b"message_full_name" + ], + ) -> None: ... @typing_extensions.final class RepeatedType(google.protobuf.message.Message): @@ -1612,7 +2388,12 @@ class Type(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _LanguageEquivalentEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Type.RepeatedType._LanguageEquivalent.ValueType], builtins.type): # noqa: F821 + class _LanguageEquivalentEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ + Type.RepeatedType._LanguageEquivalent.ValueType + ], + builtins.type, + ): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor PROTOBUF_REPEATED: Type.RepeatedType._LanguageEquivalent.ValueType # 0 """The same type used for representing protobuf repeated fields.""" @@ -1621,7 +2402,9 @@ class Type(google.protobuf.message.Message): LIST: Type.RepeatedType._LanguageEquivalent.ValueType # 2 """Language-specific list type.""" - class LanguageEquivalent(_LanguageEquivalent, metaclass=_LanguageEquivalentEnumTypeWrapper): + class LanguageEquivalent( + _LanguageEquivalent, metaclass=_LanguageEquivalentEnumTypeWrapper + ): """Possible language-specific equivalents to a repeated type.""" PROTOBUF_REPEATED: Type.RepeatedType.LanguageEquivalent.ValueType # 0 @@ -1646,8 +2429,18 @@ class Type(google.protobuf.message.Message): element_type: global___Type | None = ..., language_equivalent: global___Type.RepeatedType.LanguageEquivalent.ValueType = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["element_type", b"element_type"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing_extensions.Literal["element_type", b"element_type", "language_equivalent", b"language_equivalent"]) -> None: ... + def HasField( + self, field_name: typing_extensions.Literal["element_type", b"element_type"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "element_type", + b"element_type", + "language_equivalent", + b"language_equivalent", + ], + ) -> None: ... @typing_extensions.final class MapType(google.protobuf.message.Message): @@ -1659,14 +2452,21 @@ class Type(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _LanguageEquivalentEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Type.MapType._LanguageEquivalent.ValueType], builtins.type): # noqa: F821 + class _LanguageEquivalentEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ + Type.MapType._LanguageEquivalent.ValueType + ], + builtins.type, + ): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor PROTOBUF_MAP: Type.MapType._LanguageEquivalent.ValueType # 0 """The same type used for representing protobuf map fields.""" DICTIONARY: Type.MapType._LanguageEquivalent.ValueType # 1 """Language-specific dictionary or map type.""" - class LanguageEquivalent(_LanguageEquivalent, metaclass=_LanguageEquivalentEnumTypeWrapper): + class LanguageEquivalent( + _LanguageEquivalent, metaclass=_LanguageEquivalentEnumTypeWrapper + ): """Possible language-specific equivalents to a map type.""" PROTOBUF_MAP: Type.MapType.LanguageEquivalent.ValueType # 0 @@ -1692,8 +2492,23 @@ class Type(google.protobuf.message.Message): value_type: global___Type | None = ..., language_equivalent: global___Type.MapType.LanguageEquivalent.ValueType = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["key_type", b"key_type", "value_type", b"value_type"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing_extensions.Literal["key_type", b"key_type", "language_equivalent", b"language_equivalent", "value_type", b"value_type"]) -> None: ... + def HasField( + self, + field_name: typing_extensions.Literal[ + "key_type", b"key_type", "value_type", b"value_type" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "key_type", + b"key_type", + "language_equivalent", + b"language_equivalent", + "value_type", + b"value_type", + ], + ) -> None: ... SCALAR_TYPE_FIELD_NUMBER: builtins.int ENUM_TYPE_FIELD_NUMBER: builtins.int @@ -1728,9 +2543,57 @@ class Type(google.protobuf.message.Message): repeated_type: global___Type.RepeatedType | None = ..., map_type: global___Type.MapType | None = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["bytes_type", b"bytes_type", "enum_type", b"enum_type", "map_type", b"map_type", "message_type", b"message_type", "repeated_type", b"repeated_type", "scalar_type", b"scalar_type", "type_kind", b"type_kind"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["bytes_type", b"bytes_type", "enum_type", b"enum_type", "map_type", b"map_type", "message_type", b"message_type", "repeated_type", b"repeated_type", "scalar_type", b"scalar_type", "type_kind", b"type_kind"]) -> None: ... - def WhichOneof(self, oneof_group: typing_extensions.Literal["type_kind", b"type_kind"]) -> typing_extensions.Literal["scalar_type", "enum_type", "bytes_type", "message_type", "repeated_type", "map_type"] | None: ... + def HasField( + self, + field_name: typing_extensions.Literal[ + "bytes_type", + b"bytes_type", + "enum_type", + b"enum_type", + "map_type", + b"map_type", + "message_type", + b"message_type", + "repeated_type", + b"repeated_type", + "scalar_type", + b"scalar_type", + "type_kind", + b"type_kind", + ], + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing_extensions.Literal[ + "bytes_type", + b"bytes_type", + "enum_type", + b"enum_type", + "map_type", + b"map_type", + "message_type", + b"message_type", + "repeated_type", + b"repeated_type", + "scalar_type", + b"scalar_type", + "type_kind", + b"type_kind", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing_extensions.Literal["type_kind", b"type_kind"] + ) -> ( + typing_extensions.Literal[ + "scalar_type", + "enum_type", + "bytes_type", + "message_type", + "repeated_type", + "map_type", + ] + | None + ): ... global___Type = Type @@ -1753,7 +2616,12 @@ class Expression(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _NullValueEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Expression._NullValue.ValueType], builtins.type): # noqa: F821 + class _NullValueEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ + Expression._NullValue.ValueType + ], + builtins.type, + ): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor NULL_VALUE: Expression._NullValue.ValueType # 0 """Null value.""" @@ -1768,7 +2636,12 @@ class Expression(google.protobuf.message.Message): ValueType = typing.NewType("ValueType", builtins.int) V: typing_extensions.TypeAlias = ValueType - class _DefaultValueEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Expression._DefaultValue.ValueType], builtins.type): # noqa: F821 + class _DefaultValueEnumTypeWrapper( + google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ + Expression._DefaultValue.ValueType + ], + builtins.type, + ): # noqa: F821 DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor DEFAULT_VALUE: Expression._DefaultValue.ValueType # 0 """Default value.""" @@ -1795,7 +2668,11 @@ class Expression(google.protobuf.message.Message): name: builtins.str """The name of the variable or parameter name. 
Required.""" @property - def path(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + def path( + self, + ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[ + builtins.str + ]: """A path within name that refers to a nested value. Optional. Note that this path must be valid across all languages, so, the following rules apply. @@ -1833,7 +2710,10 @@ class Expression(google.protobuf.message.Message): name: builtins.str = ..., path: collections.abc.Iterable[builtins.str] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "path", b"path"]) -> None: ... + def ClearField( + self, + field_name: typing_extensions.Literal["name", b"name", "path", b"path"], + ) -> None: ... @typing_extensions.final class BytesValue(google.protobuf.message.Message): @@ -1860,8 +2740,12 @@ class Expression(google.protobuf.message.Message): *, file_path: global___Expression | None = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["file_path", b"file_path"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["file_path", b"file_path"]) -> None: ... + def HasField( + self, field_name: typing_extensions.Literal["file_path", b"file_path"] + ) -> builtins.bool: ... + def ClearField( + self, field_name: typing_extensions.Literal["file_path", b"file_path"] + ) -> None: ... BASE64_STRING_FIELD_NUMBER: builtins.int FILE_STREAM_FIELD_NUMBER: builtins.int @@ -1879,9 +2763,31 @@ class Expression(google.protobuf.message.Message): base64_string: global___Expression | None = ..., file_stream: global___Expression.BytesValue.FileStream | None = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["base64_string", b"base64_string", "file_stream", b"file_stream", "value", b"value"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing_extensions.Literal["base64_string", b"base64_string", "file_stream", b"file_stream", "value", b"value"]) -> None: ... - def WhichOneof(self, oneof_group: typing_extensions.Literal["value", b"value"]) -> typing_extensions.Literal["base64_string", "file_stream"] | None: ... + def HasField( + self, + field_name: typing_extensions.Literal[ + "base64_string", + b"base64_string", + "file_stream", + b"file_stream", + "value", + b"value", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "base64_string", + b"base64_string", + "file_stream", + b"file_stream", + "value", + b"value", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing_extensions.Literal["value", b"value"] + ) -> typing_extensions.Literal["base64_string", "file_stream"] | None: ... @typing_extensions.final class ComplexValue(google.protobuf.message.Message): @@ -1906,12 +2812,21 @@ class Expression(google.protobuf.message.Message): key: builtins.str = ..., value: global___Expression | None = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + def HasField( + self, field_name: typing_extensions.Literal["value", b"value"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal["key", b"key", "value", b"value"], + ) -> None: ... PROPERTIES_FIELD_NUMBER: builtins.int @property - def properties(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___Expression]: + def properties( + self, + ) -> google.protobuf.internal.containers.MessageMap[ + builtins.str, global___Expression + ]: """This is a simple map from message property name to Expression. - All keys in the map should correspond to top level properties of the protobuf message. 
@@ -1923,9 +2838,12 @@ class Expression(google.protobuf.message.Message): def __init__( self, *, - properties: collections.abc.Mapping[builtins.str, global___Expression] | None = ..., + properties: collections.abc.Mapping[builtins.str, global___Expression] + | None = ..., + ) -> None: ... + def ClearField( + self, field_name: typing_extensions.Literal["properties", b"properties"] ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["properties", b"properties"]) -> None: ... @typing_extensions.final class RepeatedValue(google.protobuf.message.Message): @@ -1937,7 +2855,11 @@ class Expression(google.protobuf.message.Message): VALUES_FIELD_NUMBER: builtins.int @property - def values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Expression]: + def values( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___Expression + ]: """The values that should be used to initialize a language-specific collection, list, array or similar. - The values should be used in the same order as they appear in values, @@ -1951,7 +2873,9 @@ class Expression(google.protobuf.message.Message): *, values: collections.abc.Iterable[global___Expression] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["values", b"values"]) -> None: ... + def ClearField( + self, field_name: typing_extensions.Literal["values", b"values"] + ) -> None: ... 
@typing_extensions.final class MapValue(google.protobuf.message.Message): @@ -1966,14 +2890,22 @@ class Expression(google.protobuf.message.Message): KEYS_FIELD_NUMBER: builtins.int VALUES_FIELD_NUMBER: builtins.int @property - def keys(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Expression]: + def keys( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___Expression + ]: """The keys to use for initializing a language-specific map, dictionary or similar. - Each key Expression should resolve to a type that is assignable to the key type of the target map. """ @property - def values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Expression]: + def values( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[ + global___Expression + ]: """The values to use for initializing a language-specific map, dictionary or similar. - Each value Expression should resolve to a type that is assignable to @@ -1987,7 +2919,10 @@ class Expression(google.protobuf.message.Message): keys: collections.abc.Iterable[global___Expression] | None = ..., values: collections.abc.Iterable[global___Expression] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["keys", b"keys", "values", b"values"]) -> None: ... + def ClearField( + self, + field_name: typing_extensions.Literal["keys", b"keys", "values", b"values"], + ) -> None: ... @typing_extensions.final class ConditionalOperator(google.protobuf.message.Message): @@ -2022,8 +2957,28 @@ class Expression(google.protobuf.message.Message): on_true: global___Expression | None = ..., on_false: global___Expression | None = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["condition", b"condition", "on_false", b"on_false", "on_true", b"on_true"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing_extensions.Literal["condition", b"condition", "on_false", b"on_false", "on_true", b"on_true"]) -> None: ... + def HasField( + self, + field_name: typing_extensions.Literal[ + "condition", + b"condition", + "on_false", + b"on_false", + "on_true", + b"on_true", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "condition", + b"condition", + "on_false", + b"on_false", + "on_true", + b"on_true", + ], + ) -> None: ... NULL_VALUE_FIELD_NUMBER: builtins.int DEFAULT_VALUE_FIELD_NUMBER: builtins.int @@ -2107,8 +3062,88 @@ class Expression(google.protobuf.message.Message): conditional_value: global___Expression.ConditionalOperator | None = ..., description: builtins.str = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["boolean_value", b"boolean_value", "bytes_value", b"bytes_value", "complex_value", b"complex_value", "conditional_value", b"conditional_value", "default_value", b"default_value", "enum_value", b"enum_value", "list_value", b"list_value", "map_value", b"map_value", "name_value", b"name_value", "null_value", b"null_value", "number_value", b"number_value", "string_value", b"string_value", "value", b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["boolean_value", b"boolean_value", "bytes_value", b"bytes_value", "complex_value", b"complex_value", "conditional_value", b"conditional_value", "default_value", b"default_value", "description", b"description", "enum_value", b"enum_value", "list_value", b"list_value", "map_value", b"map_value", "name_value", b"name_value", "null_value", b"null_value", "number_value", b"number_value", "string_value", b"string_value", "value", b"value"]) -> None: ... 
- def WhichOneof(self, oneof_group: typing_extensions.Literal["value", b"value"]) -> typing_extensions.Literal["null_value", "default_value", "name_value", "number_value", "boolean_value", "string_value", "enum_value", "bytes_value", "complex_value", "list_value", "map_value", "conditional_value"] | None: ... + def HasField( + self, + field_name: typing_extensions.Literal[ + "boolean_value", + b"boolean_value", + "bytes_value", + b"bytes_value", + "complex_value", + b"complex_value", + "conditional_value", + b"conditional_value", + "default_value", + b"default_value", + "enum_value", + b"enum_value", + "list_value", + b"list_value", + "map_value", + b"map_value", + "name_value", + b"name_value", + "null_value", + b"null_value", + "number_value", + b"number_value", + "string_value", + b"string_value", + "value", + b"value", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing_extensions.Literal[ + "boolean_value", + b"boolean_value", + "bytes_value", + b"bytes_value", + "complex_value", + b"complex_value", + "conditional_value", + b"conditional_value", + "default_value", + b"default_value", + "description", + b"description", + "enum_value", + b"enum_value", + "list_value", + b"list_value", + "map_value", + b"map_value", + "name_value", + b"name_value", + "null_value", + b"null_value", + "number_value", + b"number_value", + "string_value", + b"string_value", + "value", + b"value", + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing_extensions.Literal["value", b"value"] + ) -> ( + typing_extensions.Literal[ + "null_value", + "default_value", + "name_value", + "number_value", + "boolean_value", + "string_value", + "enum_value", + "bytes_value", + "complex_value", + "list_value", + "map_value", + "conditional_value", + ] + | None + ): ... 
global___Expression = Expression diff --git a/packages/gapic-generator/gapic/generator/__init__.py b/packages/gapic-generator/gapic/generator/__init__.py index 579201a62800..11dcef85b93a 100644 --- a/packages/gapic-generator/gapic/generator/__init__.py +++ b/packages/gapic-generator/gapic/generator/__init__.py @@ -20,5 +20,4 @@ from .generator import Generator - __all__ = ("Generator",) diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index e9e006e74884..ac4b5f017f3f 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -12,29 +12,31 @@ # See the License for the specific language governing permissions and # limitations under the License. -import jinja2 -import yaml import itertools -import re import os import pathlib +import re import typing -from typing import Any, DefaultDict, Dict, Mapping, Optional, Tuple -from hashlib import sha256 from collections import OrderedDict, defaultdict +from hashlib import sha256 +from typing import Any, DefaultDict, Dict, Mapping, Optional, Tuple + +import jinja2 +import yaml +from google.protobuf.compiler.plugin_pb2 import CodeGeneratorResponse + +from gapic import utils +from gapic.generator import formatter +from gapic.samplegen import manifest, samplegen +from gapic.samplegen_utils import snippet_index, snippet_metadata_pb2 +from gapic.samplegen_utils.types import DuplicateSample from gapic.samplegen_utils.utils import ( coerce_response_name, is_valid_sample_cfg, render_format_string, ) -from gapic.samplegen_utils.types import DuplicateSample -from gapic.samplegen_utils import snippet_index, snippet_metadata_pb2 -from gapic.samplegen import manifest, samplegen -from gapic.generator import formatter from gapic.schema import api -from gapic import utils from gapic.utils import Options -from google.protobuf.compiler.plugin_pb2 import CodeGeneratorResponse class Generator: diff --git 
a/packages/gapic-generator/gapic/samplegen/__init__.py b/packages/gapic-generator/gapic/samplegen/__init__.py index c323f9673ca1..efae9916ffbb 100644 --- a/packages/gapic-generator/gapic/samplegen/__init__.py +++ b/packages/gapic-generator/gapic/samplegen/__init__.py @@ -12,8 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from gapic.samplegen import samplegen -from gapic.samplegen import manifest +from gapic.samplegen import manifest, samplegen __all__ = ( "manifest", diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index ada7dab13026..560b45598367 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -14,22 +14,13 @@ import dataclasses import itertools -import jinja2 import json import keyword import os import re import time -import yaml - -from gapic import utils - -from gapic.samplegen_utils import types, snippet_metadata_pb2 # type: ignore -from gapic.samplegen_utils.utils import is_valid_sample_cfg -from gapic.schema import api -from gapic.schema import wrappers - -from collections import defaultdict, namedtuple, ChainMap as chainmap +from collections import ChainMap as chainmap +from collections import defaultdict, namedtuple from typing import ( Any, ChainMap, @@ -43,10 +34,18 @@ Tuple, ) +import jinja2 +import yaml + # There is no library stub file for this module, so ignore it. from google.api import resource_pb2 # type: ignore from google.protobuf import descriptor_pb2 +from gapic import utils +from gapic.samplegen_utils import snippet_metadata_pb2, types # type: ignore +from gapic.samplegen_utils.utils import is_valid_sample_cfg +from gapic.schema import api, wrappers + # Outstanding issues: # * In real sample configs, many variables are # defined with an _implicit_ $resp variable. 
diff --git a/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py b/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py index 2a4d752f3b90..6d7fd05c232e 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py +++ b/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py @@ -13,14 +13,15 @@ # limitations under the License. import re -from typing import Optional, Dict +from typing import Dict, Optional from google.protobuf import json_format +from gapic.samplegen_utils import ( + snippet_metadata_pb2, # type: ignore + types, +) from gapic.schema import api, metadata -from gapic.samplegen_utils import snippet_metadata_pb2 # type: ignore -from gapic.samplegen_utils import types - CLIENT_INIT_RE = re.compile(r"^\s+# Create a client") REQUEST_INIT_RE = re.compile(r"^\s+# Initialize request argument\(s\)") diff --git a/packages/gapic-generator/gapic/samplegen_utils/snippet_metadata_pb2.py b/packages/gapic-generator/gapic/samplegen_utils/snippet_metadata_pb2.py index 17febba690ea..c883c20e75cf 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/snippet_metadata_pb2.py +++ b/packages/gapic-generator/gapic/samplegen_utils/snippet_metadata_pb2.py @@ -3,20 +3,24 @@ # source: gapic/samplegen_utils/snippet_metadata.proto # type: ignore """Generated protocol buffer code.""" -from google.protobuf.internal import enum_type_wrapper + from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import enum_type_wrapper + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n,gapic/samplegen_utils/snippet_metadata.proto\x12-google.cloud.tools.snippetgen.snippetindex.v1\"\xa7\x01\n\x05Index\x12T\n\x0e\x63lient_library\x18\x01 \x01(\x0b\x32<.google.cloud.tools.snippetgen.snippetindex.v1.ClientLibrary\x12H\n\x08snippets\x18\x02 \x03(\x0b\x32\x36.google.cloud.tools.snippetgen.snippetindex.v1.Snippet\"\x9f\x06\n\x07Snippet\x12\x12\n\nregion_tag\x18\x01 \x01(\t\x12\r\n\x05title\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x0c\n\x04\x66ile\x18\x04 \x01(\t\x12I\n\x08language\x18\x05 \x01(\x0e\x32\x37.google.cloud.tools.snippetgen.snippetindex.v1.Language\x12R\n\rclient_method\x18\x06 \x01(\x0b\x32;.google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod\x12\x11\n\tcanonical\x18\x07 \x01(\x08\x12M\n\x06origin\x18\x08 \x01(\x0e\x32=.google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Origin\x12P\n\x08segments\x18\t \x03(\x0b\x32>.google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment\x1a\xa7\x02\n\x07Segment\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\x12X\n\x04type\x18\x03 \x01(\x0e\x32J.google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment.SegmentType\"\xa5\x01\n\x0bSegmentType\x12\x1c\n\x18SEGMENT_TYPE_UNSPECIFIED\x10\x00\x12\x08\n\x04\x46ULL\x10\x01\x12\t\n\x05SHORT\x10\x02\x12\x19\n\x15\x43LIENT_INITIALIZATION\x10\x03\x12\x1a\n\x16REQUEST_INITIALIZATION\x10\x04\x12\x15\n\x11REQUEST_EXECUTION\x10\x05\x12\x15\n\x11RESPONSE_HANDLING\x10\x06\"Q\n\x06Origin\x12\x16\n\x12ORIGIN_UNSPECIFIED\x10\x00\x12\x12\n\x0e\x41PI_DEFINITION\x10\x01\x12\n\n\x06\x43ONFIG\x10\x02\x12\x0f\n\x0bHANDWRITTEN\x10\x03\"\xf2\x02\n\x0c\x43lientMethod\x12\x12\n\nshort_name\x18\x01 \x01(\t\x12\x11\n\tfull_name\x18\x02 \x01(\t\x12\r\n\x05\x61sync\x18\x03 \x01(\x08\x12Y\n\nparameters\x18\x04 \x03(\x0b\x32\x45.google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.Parameter\x12\x13\n\x0bresult_type\x18\x05 \x01(\t\x12L\n\x06\x63lient\x18\x06 
\x01(\x0b\x32<.google.cloud.tools.snippetgen.snippetindex.v1.ServiceClient\x12\x45\n\x06method\x18\x07 \x01(\x0b\x32\x35.google.cloud.tools.snippetgen.snippetindex.v1.Method\x1a\'\n\tParameter\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"6\n\rServiceClient\x12\x12\n\nshort_name\x18\x01 \x01(\t\x12\x11\n\tfull_name\x18\x02 \x01(\t\"\xbb\x01\n\rClientLibrary\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12I\n\x08language\x18\x03 \x01(\x0e\x32\x37.google.cloud.tools.snippetgen.snippetindex.v1.Language\x12@\n\x04\x61pis\x18\x04 \x03(\x0b\x32\x32.google.cloud.tools.snippetgen.snippetindex.v1.Api\"x\n\x06Method\x12\x12\n\nshort_name\x18\x01 \x01(\t\x12\x11\n\tfull_name\x18\x02 \x01(\t\x12G\n\x07service\x18\x03 \x01(\x0b\x32\x36.google.cloud.tools.snippetgen.snippetindex.v1.Service\"0\n\x07Service\x12\x12\n\nshort_name\x18\x01 \x01(\t\x12\x11\n\tfull_name\x18\x02 \x01(\t\"\"\n\x03\x41pi\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t*\xef\x01\n\x08Language\x12\x18\n\x14LANGUAGE_UNSPECIFIED\x10\x00\x12\x0f\n\x0b\x43_PLUS_PLUS\x10\x01\x12\x0b\n\x07\x43_SHARP\x10\x02\x12\x08\n\x04\x44\x41RT\x10\x03\x12\n\n\x06\x45LIXIR\x10\x04\x12\n\n\x06\x45RLANG\x10\x05\x12\x0b\n\x07\x46_SHARP\x10\x06\x12\x06\n\x02GO\x10\x07\x12\x08\n\x04JAVA\x10\x08\x12\x0e\n\nJAVASCRIPT\x10\t\x12\n\n\x06KOTLIN\x10\n\x12\x07\n\x03PHP\x10\x0b\x12\n\n\x06PYTHON\x10\x0c\x12\x08\n\x04RUBY\x10\r\x12\x08\n\x04RUST\x10\x0e\x12\t\n\x05SWIFT\x10\x0f\x12\x0e\n\nTYPESCRIPT\x10\x10\x12\n\n\x06VB_NET\x10\x11\x42\x95\x01\xaa\x02-Google.Cloud.Tools.SnippetGen.SnippetIndex.V1\xca\x02-Google\\Cloud\\Tools\\SnippetGen\\SnippetIndex\\V1\xea\x02\x32Google::Cloud::Tools::SnippetGen::SnippetIndex::V1b\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n,gapic/samplegen_utils/snippet_metadata.proto\x12-google.cloud.tools.snippetgen.snippetindex.v1"\xa7\x01\n\x05Index\x12T\n\x0e\x63lient_library\x18\x01 
\x01(\x0b\x32<.google.cloud.tools.snippetgen.snippetindex.v1.ClientLibrary\x12H\n\x08snippets\x18\x02 \x03(\x0b\x32\x36.google.cloud.tools.snippetgen.snippetindex.v1.Snippet"\x9f\x06\n\x07Snippet\x12\x12\n\nregion_tag\x18\x01 \x01(\t\x12\r\n\x05title\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x0c\n\x04\x66ile\x18\x04 \x01(\t\x12I\n\x08language\x18\x05 \x01(\x0e\x32\x37.google.cloud.tools.snippetgen.snippetindex.v1.Language\x12R\n\rclient_method\x18\x06 \x01(\x0b\x32;.google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod\x12\x11\n\tcanonical\x18\x07 \x01(\x08\x12M\n\x06origin\x18\x08 \x01(\x0e\x32=.google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Origin\x12P\n\x08segments\x18\t \x03(\x0b\x32>.google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment\x1a\xa7\x02\n\x07Segment\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\x12X\n\x04type\x18\x03 \x01(\x0e\x32J.google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment.SegmentType"\xa5\x01\n\x0bSegmentType\x12\x1c\n\x18SEGMENT_TYPE_UNSPECIFIED\x10\x00\x12\x08\n\x04\x46ULL\x10\x01\x12\t\n\x05SHORT\x10\x02\x12\x19\n\x15\x43LIENT_INITIALIZATION\x10\x03\x12\x1a\n\x16REQUEST_INITIALIZATION\x10\x04\x12\x15\n\x11REQUEST_EXECUTION\x10\x05\x12\x15\n\x11RESPONSE_HANDLING\x10\x06"Q\n\x06Origin\x12\x16\n\x12ORIGIN_UNSPECIFIED\x10\x00\x12\x12\n\x0e\x41PI_DEFINITION\x10\x01\x12\n\n\x06\x43ONFIG\x10\x02\x12\x0f\n\x0bHANDWRITTEN\x10\x03"\xf2\x02\n\x0c\x43lientMethod\x12\x12\n\nshort_name\x18\x01 \x01(\t\x12\x11\n\tfull_name\x18\x02 \x01(\t\x12\r\n\x05\x61sync\x18\x03 \x01(\x08\x12Y\n\nparameters\x18\x04 \x03(\x0b\x32\x45.google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.Parameter\x12\x13\n\x0bresult_type\x18\x05 \x01(\t\x12L\n\x06\x63lient\x18\x06 \x01(\x0b\x32<.google.cloud.tools.snippetgen.snippetindex.v1.ServiceClient\x12\x45\n\x06method\x18\x07 
\x01(\x0b\x32\x35.google.cloud.tools.snippetgen.snippetindex.v1.Method\x1a\'\n\tParameter\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t"6\n\rServiceClient\x12\x12\n\nshort_name\x18\x01 \x01(\t\x12\x11\n\tfull_name\x18\x02 \x01(\t"\xbb\x01\n\rClientLibrary\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12I\n\x08language\x18\x03 \x01(\x0e\x32\x37.google.cloud.tools.snippetgen.snippetindex.v1.Language\x12@\n\x04\x61pis\x18\x04 \x03(\x0b\x32\x32.google.cloud.tools.snippetgen.snippetindex.v1.Api"x\n\x06Method\x12\x12\n\nshort_name\x18\x01 \x01(\t\x12\x11\n\tfull_name\x18\x02 \x01(\t\x12G\n\x07service\x18\x03 \x01(\x0b\x32\x36.google.cloud.tools.snippetgen.snippetindex.v1.Service"0\n\x07Service\x12\x12\n\nshort_name\x18\x01 \x01(\t\x12\x11\n\tfull_name\x18\x02 \x01(\t""\n\x03\x41pi\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t*\xef\x01\n\x08Language\x12\x18\n\x14LANGUAGE_UNSPECIFIED\x10\x00\x12\x0f\n\x0b\x43_PLUS_PLUS\x10\x01\x12\x0b\n\x07\x43_SHARP\x10\x02\x12\x08\n\x04\x44\x41RT\x10\x03\x12\n\n\x06\x45LIXIR\x10\x04\x12\n\n\x06\x45RLANG\x10\x05\x12\x0b\n\x07\x46_SHARP\x10\x06\x12\x06\n\x02GO\x10\x07\x12\x08\n\x04JAVA\x10\x08\x12\x0e\n\nJAVASCRIPT\x10\t\x12\n\n\x06KOTLIN\x10\n\x12\x07\n\x03PHP\x10\x0b\x12\n\n\x06PYTHON\x10\x0c\x12\x08\n\x04RUBY\x10\r\x12\x08\n\x04RUST\x10\x0e\x12\t\n\x05SWIFT\x10\x0f\x12\x0e\n\nTYPESCRIPT\x10\x10\x12\n\n\x06VB_NET\x10\x11\x42\x95\x01\xaa\x02-Google.Cloud.Tools.SnippetGen.SnippetIndex.V1\xca\x02-Google\\Cloud\\Tools\\SnippetGen\\SnippetIndex\\V1\xea\x02\x32Google::Cloud::Tools::SnippetGen::SnippetIndex::V1b\x06proto3' +) -_LANGUAGE = DESCRIPTOR.enum_types_by_name['Language'] +_LANGUAGE = DESCRIPTOR.enum_types_by_name["Language"] Language = enum_type_wrapper.EnumTypeWrapper(_LANGUAGE) LANGUAGE_UNSPECIFIED = 0 C_PLUS_PLUS = 1 @@ -38,92 +42,129 @@ VB_NET = 17 -_INDEX = DESCRIPTOR.message_types_by_name['Index'] -_SNIPPET = DESCRIPTOR.message_types_by_name['Snippet'] 
-_SNIPPET_SEGMENT = _SNIPPET.nested_types_by_name['Segment'] -_CLIENTMETHOD = DESCRIPTOR.message_types_by_name['ClientMethod'] -_CLIENTMETHOD_PARAMETER = _CLIENTMETHOD.nested_types_by_name['Parameter'] -_SERVICECLIENT = DESCRIPTOR.message_types_by_name['ServiceClient'] -_CLIENTLIBRARY = DESCRIPTOR.message_types_by_name['ClientLibrary'] -_METHOD = DESCRIPTOR.message_types_by_name['Method'] -_SERVICE = DESCRIPTOR.message_types_by_name['Service'] -_API = DESCRIPTOR.message_types_by_name['Api'] -_SNIPPET_SEGMENT_SEGMENTTYPE = _SNIPPET_SEGMENT.enum_types_by_name['SegmentType'] -_SNIPPET_ORIGIN = _SNIPPET.enum_types_by_name['Origin'] -Index = _reflection.GeneratedProtocolMessageType('Index', (_message.Message,), { - 'DESCRIPTOR': _INDEX, - '__module__': 'gapic.samplegen_utils.snippet_metadata_pb2' - # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.Index) - }) +_INDEX = DESCRIPTOR.message_types_by_name["Index"] +_SNIPPET = DESCRIPTOR.message_types_by_name["Snippet"] +_SNIPPET_SEGMENT = _SNIPPET.nested_types_by_name["Segment"] +_CLIENTMETHOD = DESCRIPTOR.message_types_by_name["ClientMethod"] +_CLIENTMETHOD_PARAMETER = _CLIENTMETHOD.nested_types_by_name["Parameter"] +_SERVICECLIENT = DESCRIPTOR.message_types_by_name["ServiceClient"] +_CLIENTLIBRARY = DESCRIPTOR.message_types_by_name["ClientLibrary"] +_METHOD = DESCRIPTOR.message_types_by_name["Method"] +_SERVICE = DESCRIPTOR.message_types_by_name["Service"] +_API = DESCRIPTOR.message_types_by_name["Api"] +_SNIPPET_SEGMENT_SEGMENTTYPE = _SNIPPET_SEGMENT.enum_types_by_name["SegmentType"] +_SNIPPET_ORIGIN = _SNIPPET.enum_types_by_name["Origin"] +Index = _reflection.GeneratedProtocolMessageType( + "Index", + (_message.Message,), + { + "DESCRIPTOR": _INDEX, + "__module__": "gapic.samplegen_utils.snippet_metadata_pb2", + # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.Index) + }, +) _sym_db.RegisterMessage(Index) -Snippet = 
_reflection.GeneratedProtocolMessageType('Snippet', (_message.Message,), { - - 'Segment': _reflection.GeneratedProtocolMessageType('Segment', (_message.Message,), { - 'DESCRIPTOR': _SNIPPET_SEGMENT, - '__module__': 'gapic.samplegen_utils.snippet_metadata_pb2' - # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment) - }), - 'DESCRIPTOR': _SNIPPET, - '__module__': 'gapic.samplegen_utils.snippet_metadata_pb2' - # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.Snippet) - }) +Snippet = _reflection.GeneratedProtocolMessageType( + "Snippet", + (_message.Message,), + { + "Segment": _reflection.GeneratedProtocolMessageType( + "Segment", + (_message.Message,), + { + "DESCRIPTOR": _SNIPPET_SEGMENT, + "__module__": "gapic.samplegen_utils.snippet_metadata_pb2", + # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment) + }, + ), + "DESCRIPTOR": _SNIPPET, + "__module__": "gapic.samplegen_utils.snippet_metadata_pb2", + # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.Snippet) + }, +) _sym_db.RegisterMessage(Snippet) _sym_db.RegisterMessage(Snippet.Segment) -ClientMethod = _reflection.GeneratedProtocolMessageType('ClientMethod', (_message.Message,), { - - 'Parameter': _reflection.GeneratedProtocolMessageType('Parameter', (_message.Message,), { - 'DESCRIPTOR': _CLIENTMETHOD_PARAMETER, - '__module__': 'gapic.samplegen_utils.snippet_metadata_pb2' - # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.Parameter) - }), - 'DESCRIPTOR': _CLIENTMETHOD, - '__module__': 'gapic.samplegen_utils.snippet_metadata_pb2' - # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod) - }) +ClientMethod = _reflection.GeneratedProtocolMessageType( + "ClientMethod", + (_message.Message,), + { + "Parameter": _reflection.GeneratedProtocolMessageType( + 
"Parameter", + (_message.Message,), + { + "DESCRIPTOR": _CLIENTMETHOD_PARAMETER, + "__module__": "gapic.samplegen_utils.snippet_metadata_pb2", + # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.Parameter) + }, + ), + "DESCRIPTOR": _CLIENTMETHOD, + "__module__": "gapic.samplegen_utils.snippet_metadata_pb2", + # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod) + }, +) _sym_db.RegisterMessage(ClientMethod) _sym_db.RegisterMessage(ClientMethod.Parameter) -ServiceClient = _reflection.GeneratedProtocolMessageType('ServiceClient', (_message.Message,), { - 'DESCRIPTOR': _SERVICECLIENT, - '__module__': 'gapic.samplegen_utils.snippet_metadata_pb2' - # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.ServiceClient) - }) +ServiceClient = _reflection.GeneratedProtocolMessageType( + "ServiceClient", + (_message.Message,), + { + "DESCRIPTOR": _SERVICECLIENT, + "__module__": "gapic.samplegen_utils.snippet_metadata_pb2", + # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.ServiceClient) + }, +) _sym_db.RegisterMessage(ServiceClient) -ClientLibrary = _reflection.GeneratedProtocolMessageType('ClientLibrary', (_message.Message,), { - 'DESCRIPTOR': _CLIENTLIBRARY, - '__module__': 'gapic.samplegen_utils.snippet_metadata_pb2' - # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.ClientLibrary) - }) +ClientLibrary = _reflection.GeneratedProtocolMessageType( + "ClientLibrary", + (_message.Message,), + { + "DESCRIPTOR": _CLIENTLIBRARY, + "__module__": "gapic.samplegen_utils.snippet_metadata_pb2", + # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.ClientLibrary) + }, +) _sym_db.RegisterMessage(ClientLibrary) -Method = _reflection.GeneratedProtocolMessageType('Method', (_message.Message,), { - 'DESCRIPTOR': _METHOD, - '__module__': 
'gapic.samplegen_utils.snippet_metadata_pb2' - # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.Method) - }) +Method = _reflection.GeneratedProtocolMessageType( + "Method", + (_message.Message,), + { + "DESCRIPTOR": _METHOD, + "__module__": "gapic.samplegen_utils.snippet_metadata_pb2", + # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.Method) + }, +) _sym_db.RegisterMessage(Method) -Service = _reflection.GeneratedProtocolMessageType('Service', (_message.Message,), { - 'DESCRIPTOR': _SERVICE, - '__module__': 'gapic.samplegen_utils.snippet_metadata_pb2' - # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.Service) - }) +Service = _reflection.GeneratedProtocolMessageType( + "Service", + (_message.Message,), + { + "DESCRIPTOR": _SERVICE, + "__module__": "gapic.samplegen_utils.snippet_metadata_pb2", + # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.Service) + }, +) _sym_db.RegisterMessage(Service) -Api = _reflection.GeneratedProtocolMessageType('Api', (_message.Message,), { - 'DESCRIPTOR': _API, - '__module__': 'gapic.samplegen_utils.snippet_metadata_pb2' - # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.Api) - }) +Api = _reflection.GeneratedProtocolMessageType( + "Api", + (_message.Message,), + { + "DESCRIPTOR": _API, + "__module__": "gapic.samplegen_utils.snippet_metadata_pb2", + # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.Api) + }, +) _sym_db.RegisterMessage(Api) if _descriptor._USE_C_DESCRIPTORS == False: - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\252\002-Google.Cloud.Tools.SnippetGen.SnippetIndex.V1\312\002-Google\\Cloud\\Tools\\SnippetGen\\SnippetIndex\\V1\352\0022Google::Cloud::Tools::SnippetGen::SnippetIndex::V1' + DESCRIPTOR._serialized_options = 
b"\252\002-Google.Cloud.Tools.SnippetGen.SnippetIndex.V1\312\002-Google\\Cloud\\Tools\\SnippetGen\\SnippetIndex\\V1\352\0022Google::Cloud::Tools::SnippetGen::SnippetIndex::V1" _LANGUAGE._serialized_start = 1895 _LANGUAGE._serialized_end = 2134 _INDEX._serialized_start = 96 diff --git a/packages/gapic-generator/gapic/samplegen_utils/types.py b/packages/gapic-generator/gapic/samplegen_utils/types.py index b1b9012bef6e..d8ad5f8d84e3 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/types.py +++ b/packages/gapic-generator/gapic/samplegen_utils/types.py @@ -13,6 +13,7 @@ # limitations under the License. from enum import Enum, auto + from gapic.utils import to_snake_case diff --git a/packages/gapic-generator/gapic/samplegen_utils/utils.py b/packages/gapic-generator/gapic/samplegen_utils/utils.py index dd2fd9eda405..aab2a29c1bfe 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/utils.py +++ b/packages/gapic-generator/gapic/samplegen_utils/utils.py @@ -16,13 +16,12 @@ that will eventually move somewhere else (probably).""" import os -import yaml +from typing import Generator, List, Tuple, Union -from typing import Generator, Tuple, List, Union +import yaml from gapic.samplegen_utils import types - MIN_SCHEMA_VERSION = (1, 2, 0) VALID_CONFIG_TYPE = "com.google.api.codegen.samplegen.v1p2.SampleConfigProto" diff --git a/packages/gapic-generator/gapic/samplegen_utils/yaml.py b/packages/gapic-generator/gapic/samplegen_utils/yaml.py index 0d004254be73..ad5405c54bcc 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/yaml.py +++ b/packages/gapic-generator/gapic/samplegen_utils/yaml.py @@ -13,8 +13,7 @@ # limitations under the License. 
import dataclasses - -from abc import abstractmethod, ABC +from abc import ABC, abstractmethod from textwrap import indent from typing import List, Optional diff --git a/packages/gapic-generator/gapic/schema/__init__.py b/packages/gapic-generator/gapic/schema/__init__.py index 8232561661cd..0381c5a2b448 100644 --- a/packages/gapic-generator/gapic/schema/__init__.py +++ b/packages/gapic-generator/gapic/schema/__init__.py @@ -20,10 +20,8 @@ These three parts are divided into the three component modules. """ +from gapic.schema import metadata, wrappers from gapic.schema.api import API -from gapic.schema import metadata -from gapic.schema import wrappers - __all__ = ( "API", diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 4ee478a64398..9323f9d3ee1d 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -36,35 +36,30 @@ Set, Tuple, ) -import yaml +import grpc # type: ignore +import yaml +from google.api import ( + annotations_pb2, # type: ignore + client_pb2, # type: ignore + http_pb2, # type: ignore + resource_pb2, # type: ignore + service_pb2, # type: ignore +) from google.api_core import exceptions -from google.api import client_pb2 # type: ignore -from google.api import http_pb2 # type: ignore -from google.api import resource_pb2 # type: ignore -from google.api import service_pb2 # type: ignore from google.cloud import extended_operations_pb2 as ex_ops_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.gapic.metadata import gapic_metadata_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import descriptor_pb2 # type: ignore -from google.protobuf.json_format import 
MessageToJson -from google.protobuf.json_format import ParseDict from google.protobuf.descriptor import ServiceDescriptor -import grpc # type: ignore from google.protobuf.descriptor_pb2 import MethodDescriptorProto -from google.api import annotations_pb2 # type: ignore -from gapic.schema import metadata -from gapic.schema import mixins -from gapic.schema import wrappers -from gapic.schema import naming as api_naming -from gapic.utils import cached_property -from gapic.utils import nth -from gapic.utils import Options -from gapic.utils import to_snake_case -from gapic.utils import RESERVED_NAMES +from google.protobuf.json_format import MessageToJson, ParseDict +from gapic.schema import metadata, mixins, wrappers +from gapic.schema import naming as api_naming +from gapic.utils import RESERVED_NAMES, Options, cached_property, nth, to_snake_case TRANSPORT_GRPC = "grpc" TRANSPORT_GRPC_ASYNC = "grpc-async" diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index 519372f4c0f4..bcc4d80c72c2 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -28,15 +28,12 @@ import dataclasses import re -from typing import FrozenSet, Set, Tuple, Optional +from typing import FrozenSet, Optional, Set, Tuple from google.protobuf import descriptor_pb2 -from gapic.schema import imp -from gapic.schema import naming -from gapic.utils import cached_property -from gapic.utils import cached_proto_context -from gapic.utils import RESERVED_NAMES +from gapic.schema import imp, naming +from gapic.utils import RESERVED_NAMES, cached_property, cached_proto_context # This class is a minor hack to optimize Address's __eq__ method. 
diff --git a/packages/gapic-generator/gapic/schema/naming.py b/packages/gapic-generator/gapic/schema/naming.py index 96b029664fd3..475fdab682be 100644 --- a/packages/gapic-generator/gapic/schema/naming.py +++ b/packages/gapic-generator/gapic/schema/naming.py @@ -16,7 +16,7 @@ import dataclasses import os import re -from typing import cast, List, Match, Tuple +from typing import List, Match, Tuple, cast from google.protobuf import descriptor_pb2 diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index f654d0a82d18..9c294ad418f6 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -37,39 +37,39 @@ from itertools import chain from typing import ( Any, - cast, + ClassVar, Dict, FrozenSet, - Iterator, Iterable, + Iterator, List, Mapping, - ClassVar, Optional, + Pattern, Sequence, Set, Tuple, Union, - Pattern, + cast, +) + +from google.api import ( + annotations_pb2, # type: ignore + client_pb2, + field_behavior_pb2, + field_info_pb2, + http_pb2, + resource_pb2, + routing_pb2, ) -from google.api import annotations_pb2 # type: ignore -from google.api import client_pb2 -from google.api import field_behavior_pb2 -from google.api import field_info_pb2 -from google.api import http_pb2 -from google.api import resource_pb2 -from google.api import routing_pb2 -from google.api_core import exceptions -from google.api_core import path_template +from google.api_core import exceptions, path_template from google.cloud import extended_operations_pb2 as ex_ops_pb2 # type: ignore from google.protobuf import descriptor_pb2 # type: ignore from google.protobuf.json_format import MessageToDict # type: ignore from gapic import utils from gapic.schema import metadata -from gapic.utils import cached_proto_context -from gapic.utils import uri_sample -from gapic.utils import make_private +from gapic.utils import cached_proto_context, make_private, uri_sample 
@dataclasses.dataclass(frozen=True) diff --git a/packages/gapic-generator/gapic/utils/__init__.py b/packages/gapic-generator/gapic/utils/__init__.py index 23c573915695..58192f3a8d20 100644 --- a/packages/gapic-generator/gapic/utils/__init__.py +++ b/packages/gapic-generator/gapic/utils/__init__.py @@ -12,27 +12,18 @@ # See the License for the specific language governing permissions and # limitations under the License. -from gapic.utils.cache import cached_property -from gapic.utils.cache import cached_proto_context -from gapic.utils.case import to_snake_case -from gapic.utils.case import to_camel_case -from gapic.utils.checks import is_msg_field_pb -from gapic.utils.checks import is_str_field_pb -from gapic.utils.code import empty -from gapic.utils.code import nth -from gapic.utils.code import partition -from gapic.utils.code import make_private +from gapic.utils.cache import cached_property, cached_proto_context +from gapic.utils.case import to_camel_case, to_snake_case +from gapic.utils.checks import is_msg_field_pb, is_str_field_pb +from gapic.utils.code import empty, make_private, nth, partition from gapic.utils.doc import doc -from gapic.utils.filename import to_valid_filename -from gapic.utils.filename import to_valid_module_name -from gapic.utils.lines import sort_lines -from gapic.utils.lines import wrap +from gapic.utils.filename import to_valid_filename, to_valid_module_name +from gapic.utils.lines import sort_lines, wrap from gapic.utils.options import Options from gapic.utils.reserved_names import RESERVED_NAMES from gapic.utils.rst import rst from gapic.utils.uri_conv import convert_uri_fieldnames - __all__ = ( "cached_property", "cached_proto_context", diff --git a/packages/gapic-generator/gapic/utils/cache.py b/packages/gapic-generator/gapic/utils/cache.py index 637e1c8e88a4..e8349c8c2bc3 100644 --- a/packages/gapic-generator/gapic/utils/cache.py +++ b/packages/gapic-generator/gapic/utils/cache.py @@ -12,8 +12,8 @@ # See the License for the specific 
language governing permissions and # limitations under the License. -import functools import contextlib +import functools import threading diff --git a/packages/gapic-generator/gapic/utils/code.py b/packages/gapic-generator/gapic/utils/code.py index 047e5f1de1d7..05e53783027f 100644 --- a/packages/gapic-generator/gapic/utils/code.py +++ b/packages/gapic-generator/gapic/utils/code.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Callable, Iterable, List, Optional, Tuple, TypeVar import itertools +from typing import Callable, Iterable, List, Optional, Tuple, TypeVar def empty(content: str) -> bool: diff --git a/packages/gapic-generator/gapic/utils/lines.py b/packages/gapic-generator/gapic/utils/lines.py index 8cad3e0fbf57..1255c07c9ac9 100644 --- a/packages/gapic-generator/gapic/utils/lines.py +++ b/packages/gapic-generator/gapic/utils/lines.py @@ -16,7 +16,6 @@ import textwrap from typing import Iterable, Optional - NUMBERED_LIST_REGEX = r"^\d+\. " diff --git a/packages/gapic-generator/gapic/utils/options.py b/packages/gapic-generator/gapic/utils/options.py index 30184f12c0a9..1d2140d50c5e 100644 --- a/packages/gapic-generator/gapic/utils/options.py +++ b/packages/gapic-generator/gapic/utils/options.py @@ -12,14 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from collections import defaultdict -from os import path -from typing import Any, DefaultDict, Dict, FrozenSet, List, Optional, Tuple - import dataclasses import json import os import warnings +from collections import defaultdict +from os import path +from typing import Any, DefaultDict, Dict, FrozenSet, List, Optional, Tuple + import yaml from gapic.samplegen_utils import utils as samplegen_utils diff --git a/packages/gapic-generator/gapic/utils/uri_conv.py b/packages/gapic-generator/gapic/utils/uri_conv.py index 2d0a32d242ff..ba3589de3765 100644 --- a/packages/gapic-generator/gapic/utils/uri_conv.py +++ b/packages/gapic-generator/gapic/utils/uri_conv.py @@ -12,9 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -from gapic.utils.reserved_names import RESERVED_NAMES from google.api_core import path_template +from gapic.utils.reserved_names import RESERVED_NAMES + def convert_uri_fieldnames(uri: str) -> str: """Modify field names in uri_templates to avoid reserved names. diff --git a/packages/gapic-generator/gapic/utils/uri_sample.py b/packages/gapic-generator/gapic/utils/uri_sample.py index 0367d6a21fd1..bfb3d3f0c82f 100644 --- a/packages/gapic-generator/gapic/utils/uri_sample.py +++ b/packages/gapic-generator/gapic/utils/uri_sample.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Any, Generator, Dict, List, Tuple import re +from typing import Any, Dict, Generator, List, Tuple def sample_names() -> Generator[str, None, None]: diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index d931a5258814..10db57b78030 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -18,18 +18,18 @@ # PIP_INDEX_URL=https://pypi.org/simple nox from __future__ import absolute_import -from concurrent.futures import ThreadPoolExecutor -from pathlib import Path + import os +import shutil import sys import tempfile import typing -import nox # type: ignore - +from concurrent.futures import ThreadPoolExecutor from contextlib import contextmanager from os import path -import shutil +from pathlib import Path +import nox # type: ignore nox.options.error_on_missing_interpreters = True @@ -719,7 +719,7 @@ def lint(session): """ # TODO(https://github.com/googleapis/google-cloud-python/issues/16186): - # SKIP: This session was not enforced in the standalone (split) repo + # SKIP: This session was not enforced in the standalone (split) repo # and is disabled here to ensure a "move-only" migration. session.skip( "Linting was not enforced in the split repo. " @@ -749,9 +749,11 @@ def lint(session): @nox.session(python=NEWEST_PYTHON) def lint_setup_py(session): # TODO(https://github.com/googleapis/google-cloud-python/issues/16186): - # SKIP: This session was not enforced in the standalone (split) repo + # SKIP: This session was not enforced in the standalone (split) repo # and is disabled here to ensure a "move-only" migration. - session.skip("Skipping now to avoid changing code during migration. See Issue #16186") + session.skip( + "Skipping now to avoid changing code during migration. See Issue #16186" + ) @nox.session(python="3.10") @@ -826,9 +828,11 @@ def prerelease_deps(session, protobuf_implementation): """ Run all tests with pre-release versions of dependencies installed. 
""" - # TODO(https://github.com/googleapis/google-cloud-python/issues/16184): + # TODO(https://github.com/googleapis/google-cloud-python/issues/16184): # Implement pre-release dependency logic to test against upcoming runtime changes. - session.skip("prerelease_deps session is not yet implemented for gapic-generator-python.") + session.skip( + "prerelease_deps session is not yet implemented for gapic-generator-python." + ) @nox.session(python=NEWEST_PYTHON) @@ -838,6 +842,8 @@ def prerelease_deps(session, protobuf_implementation): ) def core_deps_from_source(session, protobuf_implementation): """Run all tests with core dependencies installed from source.""" - # TODO(https://github.com/googleapis/google-cloud-python/issues/16185): + # TODO(https://github.com/googleapis/google-cloud-python/issues/16185): # Implement logic to install core packages directly from the mono-repo directories. - session.skip("core_deps_from_source session is not yet implemented for gapic-generator-python.") \ No newline at end of file + session.skip( + "core_deps_from_source session is not yet implemented for gapic-generator-python." 
+ ) diff --git a/packages/gapic-generator/test_utils/test_utils.py b/packages/gapic-generator/test_utils/test_utils.py index 6006e00f176f..d3104062eae9 100644 --- a/packages/gapic-generator/test_utils/test_utils.py +++ b/packages/gapic-generator/test_utils/test_utils.py @@ -15,14 +15,11 @@ import collections import typing -from gapic.schema import metadata -from gapic.schema import naming -from gapic.schema import wrappers -from google.api import annotations_pb2, routing_pb2 -from google.api import client_pb2 -from google.api import http_pb2 +from google.api import annotations_pb2, client_pb2, http_pb2, routing_pb2 from google.protobuf import descriptor_pb2 as desc +from gapic.schema import metadata, naming, wrappers + def make_service( name: str = "Placeholder", diff --git a/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py index 0c9f32e33ecb..c71b29074de6 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py @@ -28,7 +28,6 @@ import os import shlex import sys -import logging from typing import Any # If extensions (or modules to document with autodoc) are in another directory, @@ -83,9 +82,9 @@ root_doc = "index" # General information about the project. -project = u"google-cloud-asset" -copyright = u"2025, Google, LLC" -author = u"Google APIs" +project = "google-cloud-asset" +copyright = "2025, Google, LLC" +author = "Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -285,7 +284,7 @@ ( root_doc, "google-cloud-asset.tex", - u"google-cloud-asset Documentation", + "google-cloud-asset Documentation", author, "manual", ) @@ -386,6 +385,7 @@ napoleon_use_param = True napoleon_use_rtype = True + # Setup for sphinx behaviors such as warning filters. 
class UnexpectedUnindentFilter(logging.Filter): """Filter out warnings about unexpected unindentation following bullet lists.""" @@ -413,5 +413,5 @@ def setup(app: Any) -> None: """ # Sphinx's logger is hierarchical. Adding a filter to the # root 'sphinx' logger will catch warnings from all sub-loggers. - logger = logging.getLogger('sphinx') + logger = logging.getLogger("sphinx") logger.addFilter(UnexpectedUnindentFilter()) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py index fd9404f4b60c..69c41a300464 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py @@ -18,164 +18,170 @@ __version__ = package_version.__version__ +from google.cloud.asset_v1.services.asset_service.async_client import ( + AssetServiceAsyncClient, +) from google.cloud.asset_v1.services.asset_service.client import AssetServiceClient -from google.cloud.asset_v1.services.asset_service.async_client import AssetServiceAsyncClient - -from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyLongrunningMetadata -from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyLongrunningRequest -from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyLongrunningResponse -from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyRequest -from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyResponse -from google.cloud.asset_v1.types.asset_service import AnalyzeMoveRequest -from google.cloud.asset_v1.types.asset_service import AnalyzeMoveResponse -from google.cloud.asset_v1.types.asset_service import AnalyzeOrgPoliciesRequest -from google.cloud.asset_v1.types.asset_service import AnalyzeOrgPoliciesResponse -from google.cloud.asset_v1.types.asset_service import 
AnalyzeOrgPolicyGovernedAssetsRequest -from google.cloud.asset_v1.types.asset_service import AnalyzeOrgPolicyGovernedAssetsResponse -from google.cloud.asset_v1.types.asset_service import AnalyzeOrgPolicyGovernedContainersRequest -from google.cloud.asset_v1.types.asset_service import AnalyzeOrgPolicyGovernedContainersResponse -from google.cloud.asset_v1.types.asset_service import AnalyzerOrgPolicy -from google.cloud.asset_v1.types.asset_service import AnalyzerOrgPolicyConstraint -from google.cloud.asset_v1.types.asset_service import BatchGetAssetsHistoryRequest -from google.cloud.asset_v1.types.asset_service import BatchGetAssetsHistoryResponse -from google.cloud.asset_v1.types.asset_service import BatchGetEffectiveIamPoliciesRequest -from google.cloud.asset_v1.types.asset_service import BatchGetEffectiveIamPoliciesResponse -from google.cloud.asset_v1.types.asset_service import BigQueryDestination -from google.cloud.asset_v1.types.asset_service import CreateFeedRequest -from google.cloud.asset_v1.types.asset_service import CreateSavedQueryRequest -from google.cloud.asset_v1.types.asset_service import DeleteFeedRequest -from google.cloud.asset_v1.types.asset_service import DeleteSavedQueryRequest -from google.cloud.asset_v1.types.asset_service import ExportAssetsRequest -from google.cloud.asset_v1.types.asset_service import ExportAssetsResponse -from google.cloud.asset_v1.types.asset_service import Feed -from google.cloud.asset_v1.types.asset_service import FeedOutputConfig -from google.cloud.asset_v1.types.asset_service import GcsDestination -from google.cloud.asset_v1.types.asset_service import GcsOutputResult -from google.cloud.asset_v1.types.asset_service import GetFeedRequest -from google.cloud.asset_v1.types.asset_service import GetSavedQueryRequest -from google.cloud.asset_v1.types.asset_service import IamPolicyAnalysisOutputConfig -from google.cloud.asset_v1.types.asset_service import IamPolicyAnalysisQuery -from google.cloud.asset_v1.types.asset_service 
import ListAssetsRequest -from google.cloud.asset_v1.types.asset_service import ListAssetsResponse -from google.cloud.asset_v1.types.asset_service import ListFeedsRequest -from google.cloud.asset_v1.types.asset_service import ListFeedsResponse -from google.cloud.asset_v1.types.asset_service import ListSavedQueriesRequest -from google.cloud.asset_v1.types.asset_service import ListSavedQueriesResponse -from google.cloud.asset_v1.types.asset_service import MoveAnalysis -from google.cloud.asset_v1.types.asset_service import MoveAnalysisResult -from google.cloud.asset_v1.types.asset_service import MoveImpact -from google.cloud.asset_v1.types.asset_service import OutputConfig -from google.cloud.asset_v1.types.asset_service import OutputResult -from google.cloud.asset_v1.types.asset_service import PartitionSpec -from google.cloud.asset_v1.types.asset_service import PubsubDestination -from google.cloud.asset_v1.types.asset_service import QueryAssetsOutputConfig -from google.cloud.asset_v1.types.asset_service import QueryAssetsRequest -from google.cloud.asset_v1.types.asset_service import QueryAssetsResponse -from google.cloud.asset_v1.types.asset_service import QueryResult -from google.cloud.asset_v1.types.asset_service import SavedQuery -from google.cloud.asset_v1.types.asset_service import SearchAllIamPoliciesRequest -from google.cloud.asset_v1.types.asset_service import SearchAllIamPoliciesResponse -from google.cloud.asset_v1.types.asset_service import SearchAllResourcesRequest -from google.cloud.asset_v1.types.asset_service import SearchAllResourcesResponse -from google.cloud.asset_v1.types.asset_service import TableFieldSchema -from google.cloud.asset_v1.types.asset_service import TableSchema -from google.cloud.asset_v1.types.asset_service import UpdateFeedRequest -from google.cloud.asset_v1.types.asset_service import UpdateSavedQueryRequest -from google.cloud.asset_v1.types.asset_service import ContentType -from google.cloud.asset_v1.types.assets import Asset -from 
google.cloud.asset_v1.types.assets import AttachedResource -from google.cloud.asset_v1.types.assets import ConditionEvaluation -from google.cloud.asset_v1.types.assets import IamPolicyAnalysisResult -from google.cloud.asset_v1.types.assets import IamPolicyAnalysisState -from google.cloud.asset_v1.types.assets import IamPolicySearchResult -from google.cloud.asset_v1.types.assets import RelatedAsset -from google.cloud.asset_v1.types.assets import RelatedAssets -from google.cloud.asset_v1.types.assets import RelatedResource -from google.cloud.asset_v1.types.assets import RelatedResources -from google.cloud.asset_v1.types.assets import RelationshipAttributes -from google.cloud.asset_v1.types.assets import Resource -from google.cloud.asset_v1.types.assets import ResourceSearchResult -from google.cloud.asset_v1.types.assets import TemporalAsset -from google.cloud.asset_v1.types.assets import TimeWindow -from google.cloud.asset_v1.types.assets import VersionedResource +from google.cloud.asset_v1.types.asset_service import ( + AnalyzeIamPolicyLongrunningMetadata, + AnalyzeIamPolicyLongrunningRequest, + AnalyzeIamPolicyLongrunningResponse, + AnalyzeIamPolicyRequest, + AnalyzeIamPolicyResponse, + AnalyzeMoveRequest, + AnalyzeMoveResponse, + AnalyzeOrgPoliciesRequest, + AnalyzeOrgPoliciesResponse, + AnalyzeOrgPolicyGovernedAssetsRequest, + AnalyzeOrgPolicyGovernedAssetsResponse, + AnalyzeOrgPolicyGovernedContainersRequest, + AnalyzeOrgPolicyGovernedContainersResponse, + AnalyzerOrgPolicy, + AnalyzerOrgPolicyConstraint, + BatchGetAssetsHistoryRequest, + BatchGetAssetsHistoryResponse, + BatchGetEffectiveIamPoliciesRequest, + BatchGetEffectiveIamPoliciesResponse, + BigQueryDestination, + ContentType, + CreateFeedRequest, + CreateSavedQueryRequest, + DeleteFeedRequest, + DeleteSavedQueryRequest, + ExportAssetsRequest, + ExportAssetsResponse, + Feed, + FeedOutputConfig, + GcsDestination, + GcsOutputResult, + GetFeedRequest, + GetSavedQueryRequest, + IamPolicyAnalysisOutputConfig, 
+ IamPolicyAnalysisQuery, + ListAssetsRequest, + ListAssetsResponse, + ListFeedsRequest, + ListFeedsResponse, + ListSavedQueriesRequest, + ListSavedQueriesResponse, + MoveAnalysis, + MoveAnalysisResult, + MoveImpact, + OutputConfig, + OutputResult, + PartitionSpec, + PubsubDestination, + QueryAssetsOutputConfig, + QueryAssetsRequest, + QueryAssetsResponse, + QueryResult, + SavedQuery, + SearchAllIamPoliciesRequest, + SearchAllIamPoliciesResponse, + SearchAllResourcesRequest, + SearchAllResourcesResponse, + TableFieldSchema, + TableSchema, + UpdateFeedRequest, + UpdateSavedQueryRequest, +) +from google.cloud.asset_v1.types.assets import ( + Asset, + AttachedResource, + ConditionEvaluation, + IamPolicyAnalysisResult, + IamPolicyAnalysisState, + IamPolicySearchResult, + RelatedAsset, + RelatedAssets, + RelatedResource, + RelatedResources, + RelationshipAttributes, + Resource, + ResourceSearchResult, + TemporalAsset, + TimeWindow, + VersionedResource, +) -__all__ = ('AssetServiceClient', - 'AssetServiceAsyncClient', - 'AnalyzeIamPolicyLongrunningMetadata', - 'AnalyzeIamPolicyLongrunningRequest', - 'AnalyzeIamPolicyLongrunningResponse', - 'AnalyzeIamPolicyRequest', - 'AnalyzeIamPolicyResponse', - 'AnalyzeMoveRequest', - 'AnalyzeMoveResponse', - 'AnalyzeOrgPoliciesRequest', - 'AnalyzeOrgPoliciesResponse', - 'AnalyzeOrgPolicyGovernedAssetsRequest', - 'AnalyzeOrgPolicyGovernedAssetsResponse', - 'AnalyzeOrgPolicyGovernedContainersRequest', - 'AnalyzeOrgPolicyGovernedContainersResponse', - 'AnalyzerOrgPolicy', - 'AnalyzerOrgPolicyConstraint', - 'BatchGetAssetsHistoryRequest', - 'BatchGetAssetsHistoryResponse', - 'BatchGetEffectiveIamPoliciesRequest', - 'BatchGetEffectiveIamPoliciesResponse', - 'BigQueryDestination', - 'CreateFeedRequest', - 'CreateSavedQueryRequest', - 'DeleteFeedRequest', - 'DeleteSavedQueryRequest', - 'ExportAssetsRequest', - 'ExportAssetsResponse', - 'Feed', - 'FeedOutputConfig', - 'GcsDestination', - 'GcsOutputResult', - 'GetFeedRequest', - 
'GetSavedQueryRequest', - 'IamPolicyAnalysisOutputConfig', - 'IamPolicyAnalysisQuery', - 'ListAssetsRequest', - 'ListAssetsResponse', - 'ListFeedsRequest', - 'ListFeedsResponse', - 'ListSavedQueriesRequest', - 'ListSavedQueriesResponse', - 'MoveAnalysis', - 'MoveAnalysisResult', - 'MoveImpact', - 'OutputConfig', - 'OutputResult', - 'PartitionSpec', - 'PubsubDestination', - 'QueryAssetsOutputConfig', - 'QueryAssetsRequest', - 'QueryAssetsResponse', - 'QueryResult', - 'SavedQuery', - 'SearchAllIamPoliciesRequest', - 'SearchAllIamPoliciesResponse', - 'SearchAllResourcesRequest', - 'SearchAllResourcesResponse', - 'TableFieldSchema', - 'TableSchema', - 'UpdateFeedRequest', - 'UpdateSavedQueryRequest', - 'ContentType', - 'Asset', - 'AttachedResource', - 'ConditionEvaluation', - 'IamPolicyAnalysisResult', - 'IamPolicyAnalysisState', - 'IamPolicySearchResult', - 'RelatedAsset', - 'RelatedAssets', - 'RelatedResource', - 'RelatedResources', - 'RelationshipAttributes', - 'Resource', - 'ResourceSearchResult', - 'TemporalAsset', - 'TimeWindow', - 'VersionedResource', +__all__ = ( + "AssetServiceClient", + "AssetServiceAsyncClient", + "AnalyzeIamPolicyLongrunningMetadata", + "AnalyzeIamPolicyLongrunningRequest", + "AnalyzeIamPolicyLongrunningResponse", + "AnalyzeIamPolicyRequest", + "AnalyzeIamPolicyResponse", + "AnalyzeMoveRequest", + "AnalyzeMoveResponse", + "AnalyzeOrgPoliciesRequest", + "AnalyzeOrgPoliciesResponse", + "AnalyzeOrgPolicyGovernedAssetsRequest", + "AnalyzeOrgPolicyGovernedAssetsResponse", + "AnalyzeOrgPolicyGovernedContainersRequest", + "AnalyzeOrgPolicyGovernedContainersResponse", + "AnalyzerOrgPolicy", + "AnalyzerOrgPolicyConstraint", + "BatchGetAssetsHistoryRequest", + "BatchGetAssetsHistoryResponse", + "BatchGetEffectiveIamPoliciesRequest", + "BatchGetEffectiveIamPoliciesResponse", + "BigQueryDestination", + "CreateFeedRequest", + "CreateSavedQueryRequest", + "DeleteFeedRequest", + "DeleteSavedQueryRequest", + "ExportAssetsRequest", + "ExportAssetsResponse", 
+ "Feed", + "FeedOutputConfig", + "GcsDestination", + "GcsOutputResult", + "GetFeedRequest", + "GetSavedQueryRequest", + "IamPolicyAnalysisOutputConfig", + "IamPolicyAnalysisQuery", + "ListAssetsRequest", + "ListAssetsResponse", + "ListFeedsRequest", + "ListFeedsResponse", + "ListSavedQueriesRequest", + "ListSavedQueriesResponse", + "MoveAnalysis", + "MoveAnalysisResult", + "MoveImpact", + "OutputConfig", + "OutputResult", + "PartitionSpec", + "PubsubDestination", + "QueryAssetsOutputConfig", + "QueryAssetsRequest", + "QueryAssetsResponse", + "QueryResult", + "SavedQuery", + "SearchAllIamPoliciesRequest", + "SearchAllIamPoliciesResponse", + "SearchAllResourcesRequest", + "SearchAllResourcesResponse", + "TableFieldSchema", + "TableSchema", + "UpdateFeedRequest", + "UpdateSavedQueryRequest", + "ContentType", + "Asset", + "AttachedResource", + "ConditionEvaluation", + "IamPolicyAnalysisResult", + "IamPolicyAnalysisState", + "IamPolicySearchResult", + "RelatedAsset", + "RelatedAssets", + "RelatedResource", + "RelatedResources", + "RelationshipAttributes", + "Resource", + "ResourceSearchResult", + "TemporalAsset", + "TimeWindow", + "VersionedResource", ) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py index 31068ac47299..d559e116a38b 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.cloud.asset_v1 import gapic_version as package_version +import sys import google.api_core as api_core -import sys +from google.cloud.asset_v1 import gapic_version as package_version __version__ = package_version.__version__ @@ -28,114 +28,122 @@ import importlib_metadata as metadata -from .services.asset_service import AssetServiceClient -from .services.asset_service import AssetServiceAsyncClient - -from .types.asset_service import AnalyzeIamPolicyLongrunningMetadata -from .types.asset_service import AnalyzeIamPolicyLongrunningRequest -from .types.asset_service import AnalyzeIamPolicyLongrunningResponse -from .types.asset_service import AnalyzeIamPolicyRequest -from .types.asset_service import AnalyzeIamPolicyResponse -from .types.asset_service import AnalyzeMoveRequest -from .types.asset_service import AnalyzeMoveResponse -from .types.asset_service import AnalyzeOrgPoliciesRequest -from .types.asset_service import AnalyzeOrgPoliciesResponse -from .types.asset_service import AnalyzeOrgPolicyGovernedAssetsRequest -from .types.asset_service import AnalyzeOrgPolicyGovernedAssetsResponse -from .types.asset_service import AnalyzeOrgPolicyGovernedContainersRequest -from .types.asset_service import AnalyzeOrgPolicyGovernedContainersResponse -from .types.asset_service import AnalyzerOrgPolicy -from .types.asset_service import AnalyzerOrgPolicyConstraint -from .types.asset_service import BatchGetAssetsHistoryRequest -from .types.asset_service import BatchGetAssetsHistoryResponse -from .types.asset_service import BatchGetEffectiveIamPoliciesRequest -from .types.asset_service import BatchGetEffectiveIamPoliciesResponse -from .types.asset_service import BigQueryDestination -from .types.asset_service import CreateFeedRequest -from .types.asset_service import CreateSavedQueryRequest -from .types.asset_service import DeleteFeedRequest -from .types.asset_service import DeleteSavedQueryRequest -from .types.asset_service import ExportAssetsRequest -from 
.types.asset_service import ExportAssetsResponse -from .types.asset_service import Feed -from .types.asset_service import FeedOutputConfig -from .types.asset_service import GcsDestination -from .types.asset_service import GcsOutputResult -from .types.asset_service import GetFeedRequest -from .types.asset_service import GetSavedQueryRequest -from .types.asset_service import IamPolicyAnalysisOutputConfig -from .types.asset_service import IamPolicyAnalysisQuery -from .types.asset_service import ListAssetsRequest -from .types.asset_service import ListAssetsResponse -from .types.asset_service import ListFeedsRequest -from .types.asset_service import ListFeedsResponse -from .types.asset_service import ListSavedQueriesRequest -from .types.asset_service import ListSavedQueriesResponse -from .types.asset_service import MoveAnalysis -from .types.asset_service import MoveAnalysisResult -from .types.asset_service import MoveImpact -from .types.asset_service import OutputConfig -from .types.asset_service import OutputResult -from .types.asset_service import PartitionSpec -from .types.asset_service import PubsubDestination -from .types.asset_service import QueryAssetsOutputConfig -from .types.asset_service import QueryAssetsRequest -from .types.asset_service import QueryAssetsResponse -from .types.asset_service import QueryResult -from .types.asset_service import SavedQuery -from .types.asset_service import SearchAllIamPoliciesRequest -from .types.asset_service import SearchAllIamPoliciesResponse -from .types.asset_service import SearchAllResourcesRequest -from .types.asset_service import SearchAllResourcesResponse -from .types.asset_service import TableFieldSchema -from .types.asset_service import TableSchema -from .types.asset_service import UpdateFeedRequest -from .types.asset_service import UpdateSavedQueryRequest -from .types.asset_service import ContentType -from .types.assets import Asset -from .types.assets import AttachedResource -from .types.assets import 
ConditionEvaluation -from .types.assets import IamPolicyAnalysisResult -from .types.assets import IamPolicyAnalysisState -from .types.assets import IamPolicySearchResult -from .types.assets import RelatedAsset -from .types.assets import RelatedAssets -from .types.assets import RelatedResource -from .types.assets import RelatedResources -from .types.assets import RelationshipAttributes -from .types.assets import Resource -from .types.assets import ResourceSearchResult -from .types.assets import TemporalAsset -from .types.assets import TimeWindow -from .types.assets import VersionedResource +from .services.asset_service import AssetServiceAsyncClient, AssetServiceClient +from .types.asset_service import ( + AnalyzeIamPolicyLongrunningMetadata, + AnalyzeIamPolicyLongrunningRequest, + AnalyzeIamPolicyLongrunningResponse, + AnalyzeIamPolicyRequest, + AnalyzeIamPolicyResponse, + AnalyzeMoveRequest, + AnalyzeMoveResponse, + AnalyzeOrgPoliciesRequest, + AnalyzeOrgPoliciesResponse, + AnalyzeOrgPolicyGovernedAssetsRequest, + AnalyzeOrgPolicyGovernedAssetsResponse, + AnalyzeOrgPolicyGovernedContainersRequest, + AnalyzeOrgPolicyGovernedContainersResponse, + AnalyzerOrgPolicy, + AnalyzerOrgPolicyConstraint, + BatchGetAssetsHistoryRequest, + BatchGetAssetsHistoryResponse, + BatchGetEffectiveIamPoliciesRequest, + BatchGetEffectiveIamPoliciesResponse, + BigQueryDestination, + ContentType, + CreateFeedRequest, + CreateSavedQueryRequest, + DeleteFeedRequest, + DeleteSavedQueryRequest, + ExportAssetsRequest, + ExportAssetsResponse, + Feed, + FeedOutputConfig, + GcsDestination, + GcsOutputResult, + GetFeedRequest, + GetSavedQueryRequest, + IamPolicyAnalysisOutputConfig, + IamPolicyAnalysisQuery, + ListAssetsRequest, + ListAssetsResponse, + ListFeedsRequest, + ListFeedsResponse, + ListSavedQueriesRequest, + ListSavedQueriesResponse, + MoveAnalysis, + MoveAnalysisResult, + MoveImpact, + OutputConfig, + OutputResult, + PartitionSpec, + PubsubDestination, + QueryAssetsOutputConfig, + 
QueryAssetsRequest, + QueryAssetsResponse, + QueryResult, + SavedQuery, + SearchAllIamPoliciesRequest, + SearchAllIamPoliciesResponse, + SearchAllResourcesRequest, + SearchAllResourcesResponse, + TableFieldSchema, + TableSchema, + UpdateFeedRequest, + UpdateSavedQueryRequest, +) +from .types.assets import ( + Asset, + AttachedResource, + ConditionEvaluation, + IamPolicyAnalysisResult, + IamPolicyAnalysisState, + IamPolicySearchResult, + RelatedAsset, + RelatedAssets, + RelatedResource, + RelatedResources, + RelationshipAttributes, + Resource, + ResourceSearchResult, + TemporalAsset, + TimeWindow, + VersionedResource, +) -if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER - api_core.check_python_version("google.cloud.asset_v1") # type: ignore - api_core.check_dependency_versions("google.cloud.asset_v1") # type: ignore -else: # pragma: NO COVER +if hasattr(api_core, "check_python_version") and hasattr( + api_core, "check_dependency_versions" +): # pragma: NO COVER + api_core.check_python_version("google.cloud.asset_v1") # type: ignore + api_core.check_dependency_versions("google.cloud.asset_v1") # type: ignore +else: # pragma: NO COVER # An older version of api_core is installed which does not define the # functions above. We do equivalent checks manually. try: - import warnings import sys + import warnings _py_version_str = sys.version.split()[0] _package_label = "google.cloud.asset_v1" if sys.version_info < (3, 9): - warnings.warn("You are using a non-supported Python version " + - f"({_py_version_str}). Google will not post any further " + - f"updates to {_package_label} supporting this Python version. " + - "Please upgrade to the latest Python version, or at " + - f"least to Python 3.9, and then update {_package_label}.", - FutureWarning) + warnings.warn( + "You are using a non-supported Python version " + + f"({_py_version_str}). 
Google will not post any further " + + f"updates to {_package_label} supporting this Python version. " + + "Please upgrade to the latest Python version, or at " + + f"least to Python 3.9, and then update {_package_label}.", + FutureWarning, + ) if sys.version_info[:2] == (3, 9): - warnings.warn(f"You are using a Python version ({_py_version_str}) " + - f"which Google will stop supporting in {_package_label} in " + - "January 2026. Please " + - "upgrade to the latest Python version, or at " + - "least to Python 3.10, before then, and " + - f"then update {_package_label}.", - FutureWarning) + warnings.warn( + f"You are using a Python version ({_py_version_str}) " + + f"which Google will stop supporting in {_package_label} in " + + "January 2026. Please " + + "upgrade to the latest Python version, or at " + + "least to Python 3.10, before then, and " + + f"then update {_package_label}.", + FutureWarning, + ) def parse_version_to_tuple(version_string: str): """Safely converts a semantic version string to a comparable tuple of integers. @@ -173,104 +181,108 @@ def _get_version(dependency_name): _recommendation = " (we recommend 6.x)" (_version_used, _version_used_string) = _get_version(_dependency_package) if _version_used and _version_used < _next_supported_version_tuple: - warnings.warn(f"Package {_package_label} depends on " + - f"{_dependency_package}, currently installed at version " + - f"{_version_used_string}. Future updates to " + - f"{_package_label} will require {_dependency_package} at " + - f"version {_next_supported_version} or higher{_recommendation}." 
+ - " Please ensure " + - "that either (a) your Python environment doesn't pin the " + - f"version of {_dependency_package}, so that updates to " + - f"{_package_label} can require the higher version, or " + - "(b) you manually update your Python environment to use at " + - f"least version {_next_supported_version} of " + - f"{_dependency_package}.", - FutureWarning) + warnings.warn( + f"Package {_package_label} depends on " + + f"{_dependency_package}, currently installed at version " + + f"{_version_used_string}. Future updates to " + + f"{_package_label} will require {_dependency_package} at " + + f"version {_next_supported_version} or higher{_recommendation}." + + " Please ensure " + + "that either (a) your Python environment doesn't pin the " + + f"version of {_dependency_package}, so that updates to " + + f"{_package_label} can require the higher version, or " + + "(b) you manually update your Python environment to use at " + + f"least version {_next_supported_version} of " + + f"{_dependency_package}.", + FutureWarning, + ) except Exception: - warnings.warn("Could not determine the version of Python " + - "currently being used. To continue receiving " + - "updates for {_package_label}, ensure you are " + - "using a supported version of Python; see " + - "https://devguide.python.org/versions/") + warnings.warn( + "Could not determine the version of Python " + + "currently being used. 
To continue receiving " + + "updates for {_package_label}, ensure you are " + + "using a supported version of Python; see " + + "https://devguide.python.org/versions/" + ) __all__ = ( - 'AssetServiceAsyncClient', -'AnalyzeIamPolicyLongrunningMetadata', -'AnalyzeIamPolicyLongrunningRequest', -'AnalyzeIamPolicyLongrunningResponse', -'AnalyzeIamPolicyRequest', -'AnalyzeIamPolicyResponse', -'AnalyzeMoveRequest', -'AnalyzeMoveResponse', -'AnalyzeOrgPoliciesRequest', -'AnalyzeOrgPoliciesResponse', -'AnalyzeOrgPolicyGovernedAssetsRequest', -'AnalyzeOrgPolicyGovernedAssetsResponse', -'AnalyzeOrgPolicyGovernedContainersRequest', -'AnalyzeOrgPolicyGovernedContainersResponse', -'AnalyzerOrgPolicy', -'AnalyzerOrgPolicyConstraint', -'Asset', -'AssetServiceClient', -'AttachedResource', -'BatchGetAssetsHistoryRequest', -'BatchGetAssetsHistoryResponse', -'BatchGetEffectiveIamPoliciesRequest', -'BatchGetEffectiveIamPoliciesResponse', -'BigQueryDestination', -'ConditionEvaluation', -'ContentType', -'CreateFeedRequest', -'CreateSavedQueryRequest', -'DeleteFeedRequest', -'DeleteSavedQueryRequest', -'ExportAssetsRequest', -'ExportAssetsResponse', -'Feed', -'FeedOutputConfig', -'GcsDestination', -'GcsOutputResult', -'GetFeedRequest', -'GetSavedQueryRequest', -'IamPolicyAnalysisOutputConfig', -'IamPolicyAnalysisQuery', -'IamPolicyAnalysisResult', -'IamPolicyAnalysisState', -'IamPolicySearchResult', -'ListAssetsRequest', -'ListAssetsResponse', -'ListFeedsRequest', -'ListFeedsResponse', -'ListSavedQueriesRequest', -'ListSavedQueriesResponse', -'MoveAnalysis', -'MoveAnalysisResult', -'MoveImpact', -'OutputConfig', -'OutputResult', -'PartitionSpec', -'PubsubDestination', -'QueryAssetsOutputConfig', -'QueryAssetsRequest', -'QueryAssetsResponse', -'QueryResult', -'RelatedAsset', -'RelatedAssets', -'RelatedResource', -'RelatedResources', -'RelationshipAttributes', -'Resource', -'ResourceSearchResult', -'SavedQuery', -'SearchAllIamPoliciesRequest', -'SearchAllIamPoliciesResponse', 
-'SearchAllResourcesRequest', -'SearchAllResourcesResponse', -'TableFieldSchema', -'TableSchema', -'TemporalAsset', -'TimeWindow', -'UpdateFeedRequest', -'UpdateSavedQueryRequest', -'VersionedResource', + "AssetServiceAsyncClient", + "AnalyzeIamPolicyLongrunningMetadata", + "AnalyzeIamPolicyLongrunningRequest", + "AnalyzeIamPolicyLongrunningResponse", + "AnalyzeIamPolicyRequest", + "AnalyzeIamPolicyResponse", + "AnalyzeMoveRequest", + "AnalyzeMoveResponse", + "AnalyzeOrgPoliciesRequest", + "AnalyzeOrgPoliciesResponse", + "AnalyzeOrgPolicyGovernedAssetsRequest", + "AnalyzeOrgPolicyGovernedAssetsResponse", + "AnalyzeOrgPolicyGovernedContainersRequest", + "AnalyzeOrgPolicyGovernedContainersResponse", + "AnalyzerOrgPolicy", + "AnalyzerOrgPolicyConstraint", + "Asset", + "AssetServiceClient", + "AttachedResource", + "BatchGetAssetsHistoryRequest", + "BatchGetAssetsHistoryResponse", + "BatchGetEffectiveIamPoliciesRequest", + "BatchGetEffectiveIamPoliciesResponse", + "BigQueryDestination", + "ConditionEvaluation", + "ContentType", + "CreateFeedRequest", + "CreateSavedQueryRequest", + "DeleteFeedRequest", + "DeleteSavedQueryRequest", + "ExportAssetsRequest", + "ExportAssetsResponse", + "Feed", + "FeedOutputConfig", + "GcsDestination", + "GcsOutputResult", + "GetFeedRequest", + "GetSavedQueryRequest", + "IamPolicyAnalysisOutputConfig", + "IamPolicyAnalysisQuery", + "IamPolicyAnalysisResult", + "IamPolicyAnalysisState", + "IamPolicySearchResult", + "ListAssetsRequest", + "ListAssetsResponse", + "ListFeedsRequest", + "ListFeedsResponse", + "ListSavedQueriesRequest", + "ListSavedQueriesResponse", + "MoveAnalysis", + "MoveAnalysisResult", + "MoveImpact", + "OutputConfig", + "OutputResult", + "PartitionSpec", + "PubsubDestination", + "QueryAssetsOutputConfig", + "QueryAssetsRequest", + "QueryAssetsResponse", + "QueryResult", + "RelatedAsset", + "RelatedAssets", + "RelatedResource", + "RelatedResources", + "RelationshipAttributes", + "Resource", + "ResourceSearchResult", + 
"SavedQuery", + "SearchAllIamPoliciesRequest", + "SearchAllIamPoliciesResponse", + "SearchAllResourcesRequest", + "SearchAllResourcesResponse", + "TableFieldSchema", + "TableSchema", + "TemporalAsset", + "TimeWindow", + "UpdateFeedRequest", + "UpdateSavedQueryRequest", + "VersionedResource", ) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py index b35796f58242..9ce0d49d8a2e 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from .client import AssetServiceClient from .async_client import AssetServiceAsyncClient +from .client import AssetServiceClient __all__ = ( - 'AssetServiceClient', - 'AssetServiceAsyncClient', + "AssetServiceClient", + "AssetServiceAsyncClient", ) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index c16804251ca6..d0e85797da41 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -14,48 +14,59 @@ # limitations under the License. 
# import logging as std_logging -from collections import OrderedDict import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.asset_v1 import gapic_version as package_version +from collections import OrderedDict +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) -from google.api_core.client_options import ClientOptions +import google.protobuf from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.asset_v1 import gapic_version as package_version +from google.oauth2 import service_account # type: ignore try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore -from google.cloud.asset_v1.services.asset_service import pagers -from google.cloud.asset_v1.types import asset_service -from google.cloud.asset_v1.types import assets -from google.longrunning import operations_pb2 # type: ignore import google.api_core.operation as operation # type: ignore import google.api_core.operation_async as operation_async # type: ignore import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore import google.rpc.status_pb2 as status_pb2 # type: ignore import google.type.expr_pb2 as expr_pb2 # type: ignore -from .transports.base import AssetServiceTransport, DEFAULT_CLIENT_INFO -from 
.transports.grpc_asyncio import AssetServiceGrpcAsyncIOTransport +from google.cloud.asset_v1.services.asset_service import pagers +from google.cloud.asset_v1.types import asset_service, assets +from google.longrunning import operations_pb2 # type: ignore + from .client import AssetServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, AssetServiceTransport +from .transports.grpc_asyncio import AssetServiceGrpcAsyncIOTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) + class AssetServiceAsyncClient: """Asset service definition.""" @@ -81,17 +92,29 @@ class AssetServiceAsyncClient: saved_query_path = staticmethod(AssetServiceClient.saved_query_path) parse_saved_query_path = staticmethod(AssetServiceClient.parse_saved_query_path) service_perimeter_path = staticmethod(AssetServiceClient.service_perimeter_path) - parse_service_perimeter_path = staticmethod(AssetServiceClient.parse_service_perimeter_path) - common_billing_account_path = staticmethod(AssetServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(AssetServiceClient.parse_common_billing_account_path) + parse_service_perimeter_path = staticmethod( + AssetServiceClient.parse_service_perimeter_path + ) + common_billing_account_path = staticmethod( + AssetServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + AssetServiceClient.parse_common_billing_account_path + ) common_folder_path = staticmethod(AssetServiceClient.common_folder_path) parse_common_folder_path = staticmethod(AssetServiceClient.parse_common_folder_path) common_organization_path = staticmethod(AssetServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(AssetServiceClient.parse_common_organization_path) + parse_common_organization_path = 
staticmethod( + AssetServiceClient.parse_common_organization_path + ) common_project_path = staticmethod(AssetServiceClient.common_project_path) - parse_common_project_path = staticmethod(AssetServiceClient.parse_common_project_path) + parse_common_project_path = staticmethod( + AssetServiceClient.parse_common_project_path + ) common_location_path = staticmethod(AssetServiceClient.common_location_path) - parse_common_location_path = staticmethod(AssetServiceClient.parse_common_location_path) + parse_common_location_path = staticmethod( + AssetServiceClient.parse_common_location_path + ) @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): @@ -133,7 +156,9 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): """Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -196,12 +221,16 @@ def universe_domain(self) -> str: get_transport_class = AssetServiceClient.get_transport_class - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AssetServiceTransport, Callable[..., AssetServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, AssetServiceTransport, Callable[..., AssetServiceTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the asset service async client. 
Args: @@ -259,30 +288,38 @@ def __init__(self, *, transport=transport, client_options=client_options, client_info=client_info, - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.asset_v1.AssetServiceAsyncClient`.", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { "serviceName": "google.cloud.asset.v1.AssetService", "credentialsType": None, - } + }, ) - async def export_assets(self, - request: Optional[Union[asset_service.ExportAssetsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def export_assets( + self, + request: Optional[Union[asset_service.ExportAssetsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Exports assets with time and resource types to a given Cloud Storage location/BigQuery table. 
For Cloud Storage location destinations, the output format is newline-delimited JSON. Each @@ -361,14 +398,14 @@ async def sample_export_assets(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.export_assets] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.export_assets + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -393,14 +430,15 @@ async def sample_export_assets(): # Done; return the response. return response - async def list_assets(self, - request: Optional[Union[asset_service.ListAssetsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListAssetsAsyncPager: + async def list_assets( + self, + request: Optional[Union[asset_service.ListAssetsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListAssetsAsyncPager: r"""Lists assets with time and resource types and returns paged results in response. @@ -467,10 +505,14 @@ async def sample_list_assets(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -484,14 +526,14 @@ async def sample_list_assets(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_assets] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_assets + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -519,13 +561,16 @@ async def sample_list_assets(): # Done; return the response. 
return response - async def batch_get_assets_history(self, - request: Optional[Union[asset_service.BatchGetAssetsHistoryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.BatchGetAssetsHistoryResponse: + async def batch_get_assets_history( + self, + request: Optional[ + Union[asset_service.BatchGetAssetsHistoryRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.BatchGetAssetsHistoryResponse: r"""Batch gets the update history of assets that overlap a time window. For IAM_POLICY content, this API outputs history when the asset and its attached IAM POLICY both exist. This can @@ -583,14 +628,14 @@ async def sample_batch_get_assets_history(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.batch_get_assets_history] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.batch_get_assets_history + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -607,14 +652,15 @@ async def sample_batch_get_assets_history(): # Done; return the response. 
return response - async def create_feed(self, - request: Optional[Union[asset_service.CreateFeedRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.Feed: + async def create_feed( + self, + request: Optional[Union[asset_service.CreateFeedRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.Feed: r"""Creates a feed in a parent project/folder/organization to listen to its asset updates. @@ -691,10 +737,14 @@ async def sample_create_feed(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -708,14 +758,14 @@ async def sample_create_feed(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.create_feed] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_feed + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -732,14 +782,15 @@ async def sample_create_feed(): # Done; return the response. return response - async def get_feed(self, - request: Optional[Union[asset_service.GetFeedRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.Feed: + async def get_feed( + self, + request: Optional[Union[asset_service.GetFeedRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.Feed: r"""Gets details about an asset feed. .. code-block:: python @@ -804,10 +855,14 @@ async def sample_get_feed(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -826,9 +881,7 @@ async def sample_get_feed(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -845,14 +898,15 @@ async def sample_get_feed(): # Done; return the response. return response - async def list_feeds(self, - request: Optional[Union[asset_service.ListFeedsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.ListFeedsResponse: + async def list_feeds( + self, + request: Optional[Union[asset_service.ListFeedsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.ListFeedsResponse: r"""Lists all asset feeds in a parent project/folder/organization. @@ -912,10 +966,14 @@ async def sample_list_feeds(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -929,14 +987,14 @@ async def sample_list_feeds(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_feeds] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_feeds + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -953,14 +1011,15 @@ async def sample_list_feeds(): # Done; return the response. 
return response - async def update_feed(self, - request: Optional[Union[asset_service.UpdateFeedRequest, dict]] = None, - *, - feed: Optional[asset_service.Feed] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.Feed: + async def update_feed( + self, + request: Optional[Union[asset_service.UpdateFeedRequest, dict]] = None, + *, + feed: Optional[asset_service.Feed] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.Feed: r"""Updates an asset feed configuration. .. code-block:: python @@ -1029,10 +1088,14 @@ async def sample_update_feed(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [feed] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1046,14 +1109,16 @@ async def sample_update_feed(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.update_feed] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_feed + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("feed.name", request.feed.name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("feed.name", request.feed.name),) + ), ) # Validate the universe domain. @@ -1070,14 +1135,15 @@ async def sample_update_feed(): # Done; return the response. return response - async def delete_feed(self, - request: Optional[Union[asset_service.DeleteFeedRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def delete_feed( + self, + request: Optional[Union[asset_service.DeleteFeedRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes an asset feed. .. code-block:: python @@ -1127,10 +1193,14 @@ async def sample_delete_feed(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1144,14 +1214,14 @@ async def sample_delete_feed(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_feed] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_feed + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1165,16 +1235,17 @@ async def sample_delete_feed(): metadata=metadata, ) - async def search_all_resources(self, - request: Optional[Union[asset_service.SearchAllResourcesRequest, dict]] = None, - *, - scope: Optional[str] = None, - query: Optional[str] = None, - asset_types: Optional[MutableSequence[str]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.SearchAllResourcesAsyncPager: + async def search_all_resources( + self, + request: Optional[Union[asset_service.SearchAllResourcesRequest, dict]] = None, + *, + scope: Optional[str] = None, + query: Optional[str] = None, + asset_types: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.SearchAllResourcesAsyncPager: r"""Searches all Google Cloud resources within the specified scope, such as a project, folder, or organization. 
The caller must be granted the ``cloudasset.assets.searchAllResources`` permission @@ -1342,10 +1413,14 @@ async def sample_search_all_resources(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [scope, query, asset_types] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1363,14 +1438,14 @@ async def sample_search_all_resources(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.search_all_resources] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.search_all_resources + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", request.scope),)), ) # Validate the universe domain. @@ -1398,15 +1473,18 @@ async def sample_search_all_resources(): # Done; return the response. 
return response - async def search_all_iam_policies(self, - request: Optional[Union[asset_service.SearchAllIamPoliciesRequest, dict]] = None, - *, - scope: Optional[str] = None, - query: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.SearchAllIamPoliciesAsyncPager: + async def search_all_iam_policies( + self, + request: Optional[ + Union[asset_service.SearchAllIamPoliciesRequest, dict] + ] = None, + *, + scope: Optional[str] = None, + query: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.SearchAllIamPoliciesAsyncPager: r"""Searches all IAM policies within the specified scope, such as a project, folder, or organization. The caller must be granted the ``cloudasset.assets.searchAllIamPolicies`` permission on the @@ -1536,10 +1614,14 @@ async def sample_search_all_iam_policies(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [scope, query] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -1555,14 +1637,14 @@ async def sample_search_all_iam_policies(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.search_all_iam_policies] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.search_all_iam_policies + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", request.scope),)), ) # Validate the universe domain. @@ -1590,13 +1672,14 @@ async def sample_search_all_iam_policies(): # Done; return the response. return response - async def analyze_iam_policy(self, - request: Optional[Union[asset_service.AnalyzeIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.AnalyzeIamPolicyResponse: + async def analyze_iam_policy( + self, + request: Optional[Union[asset_service.AnalyzeIamPolicyRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.AnalyzeIamPolicyResponse: r"""Analyzes IAM policies to answer which identities have what accesses on which resources. @@ -1655,14 +1738,16 @@ async def sample_analyze_iam_policy(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.analyze_iam_policy] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.analyze_iam_policy + ] # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("analysis_query.scope", request.analysis_query.scope), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("analysis_query.scope", request.analysis_query.scope),) + ), ) # Validate the universe domain. @@ -1679,13 +1764,16 @@ async def sample_analyze_iam_policy(): # Done; return the response. return response - async def analyze_iam_policy_longrunning(self, - request: Optional[Union[asset_service.AnalyzeIamPolicyLongrunningRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def analyze_iam_policy_longrunning( + self, + request: Optional[ + Union[asset_service.AnalyzeIamPolicyLongrunningRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Analyzes IAM policies asynchronously to answer which identities have what accesses on which resources, and writes the analysis results to a Google Cloud Storage or a BigQuery destination. For @@ -1764,14 +1852,16 @@ async def sample_analyze_iam_policy_longrunning(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.analyze_iam_policy_longrunning] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.analyze_iam_policy_longrunning + ] # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("analysis_query.scope", request.analysis_query.scope), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("analysis_query.scope", request.analysis_query.scope),) + ), ) # Validate the universe domain. @@ -1796,13 +1886,14 @@ async def sample_analyze_iam_policy_longrunning(): # Done; return the response. return response - async def analyze_move(self, - request: Optional[Union[asset_service.AnalyzeMoveRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.AnalyzeMoveResponse: + async def analyze_move( + self, + request: Optional[Union[asset_service.AnalyzeMoveRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.AnalyzeMoveResponse: r"""Analyze moving a resource to a specified destination without kicking off the actual move. The analysis is best effort depending on the user's permissions of @@ -1864,14 +1955,14 @@ async def sample_analyze_move(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.analyze_move] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.analyze_move + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), ) # Validate the universe domain. @@ -1888,13 +1979,14 @@ async def sample_analyze_move(): # Done; return the response. 
return response - async def query_assets(self, - request: Optional[Union[asset_service.QueryAssetsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.QueryAssetsResponse: + async def query_assets( + self, + request: Optional[Union[asset_service.QueryAssetsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.QueryAssetsResponse: r"""Issue a job that queries assets using a SQL statement compatible with `BigQuery Standard SQL `__. @@ -1962,14 +2054,14 @@ async def sample_query_assets(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.query_assets] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.query_assets + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1986,16 +2078,17 @@ async def sample_query_assets(): # Done; return the response. 
return response - async def create_saved_query(self, - request: Optional[Union[asset_service.CreateSavedQueryRequest, dict]] = None, - *, - parent: Optional[str] = None, - saved_query: Optional[asset_service.SavedQuery] = None, - saved_query_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.SavedQuery: + async def create_saved_query( + self, + request: Optional[Union[asset_service.CreateSavedQueryRequest, dict]] = None, + *, + parent: Optional[str] = None, + saved_query: Optional[asset_service.SavedQuery] = None, + saved_query_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SavedQuery: r"""Creates a saved query in a parent project/folder/organization. @@ -2081,10 +2174,14 @@ async def sample_create_saved_query(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent, saved_query, saved_query_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -2102,14 +2199,14 @@ async def sample_create_saved_query(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_saved_query] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_saved_query + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2126,14 +2223,15 @@ async def sample_create_saved_query(): # Done; return the response. return response - async def get_saved_query(self, - request: Optional[Union[asset_service.GetSavedQueryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.SavedQuery: + async def get_saved_query( + self, + request: Optional[Union[asset_service.GetSavedQueryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SavedQuery: r"""Gets details about a saved query. .. code-block:: python @@ -2194,10 +2292,14 @@ async def sample_get_saved_query(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2211,14 +2313,14 @@ async def sample_get_saved_query(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_saved_query] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_saved_query + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2235,14 +2337,15 @@ async def sample_get_saved_query(): # Done; return the response. 
return response - async def list_saved_queries(self, - request: Optional[Union[asset_service.ListSavedQueriesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListSavedQueriesAsyncPager: + async def list_saved_queries( + self, + request: Optional[Union[asset_service.ListSavedQueriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSavedQueriesAsyncPager: r"""Lists all saved queries in a parent project/folder/organization. @@ -2309,10 +2412,14 @@ async def sample_list_saved_queries(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2326,14 +2433,14 @@ async def sample_list_saved_queries(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.list_saved_queries] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_saved_queries + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2361,15 +2468,16 @@ async def sample_list_saved_queries(): # Done; return the response. return response - async def update_saved_query(self, - request: Optional[Union[asset_service.UpdateSavedQueryRequest, dict]] = None, - *, - saved_query: Optional[asset_service.SavedQuery] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.SavedQuery: + async def update_saved_query( + self, + request: Optional[Union[asset_service.UpdateSavedQueryRequest, dict]] = None, + *, + saved_query: Optional[asset_service.SavedQuery] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SavedQuery: r"""Updates a saved query. .. code-block:: python @@ -2438,10 +2546,14 @@ async def sample_update_saved_query(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [saved_query, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2457,14 +2569,16 @@ async def sample_update_saved_query(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_saved_query] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_saved_query + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("saved_query.name", request.saved_query.name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("saved_query.name", request.saved_query.name),) + ), ) # Validate the universe domain. @@ -2481,14 +2595,15 @@ async def sample_update_saved_query(): # Done; return the response. 
return response - async def delete_saved_query(self, - request: Optional[Union[asset_service.DeleteSavedQueryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def delete_saved_query( + self, + request: Optional[Union[asset_service.DeleteSavedQueryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a saved query. .. code-block:: python @@ -2540,10 +2655,14 @@ async def sample_delete_saved_query(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2557,14 +2676,14 @@ async def sample_delete_saved_query(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_saved_query] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_saved_query + ] # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2578,13 +2697,16 @@ async def sample_delete_saved_query(): metadata=metadata, ) - async def batch_get_effective_iam_policies(self, - request: Optional[Union[asset_service.BatchGetEffectiveIamPoliciesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: + async def batch_get_effective_iam_policies( + self, + request: Optional[ + Union[asset_service.BatchGetEffectiveIamPoliciesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: r"""Gets effective IAM policies for a batch of resources. .. code-block:: python @@ -2640,14 +2762,14 @@ async def sample_batch_get_effective_iam_policies(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.batch_get_effective_iam_policies] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.batch_get_effective_iam_policies + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", request.scope),)), ) # Validate the universe domain. @@ -2664,16 +2786,17 @@ async def sample_batch_get_effective_iam_policies(): # Done; return the response. 
return response - async def analyze_org_policies(self, - request: Optional[Union[asset_service.AnalyzeOrgPoliciesRequest, dict]] = None, - *, - scope: Optional[str] = None, - constraint: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.AnalyzeOrgPoliciesAsyncPager: + async def analyze_org_policies( + self, + request: Optional[Union[asset_service.AnalyzeOrgPoliciesRequest, dict]] = None, + *, + scope: Optional[str] = None, + constraint: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.AnalyzeOrgPoliciesAsyncPager: r"""Analyzes organization policies under a scope. .. code-block:: python @@ -2763,10 +2886,14 @@ async def sample_analyze_org_policies(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [scope, constraint, filter] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2784,14 +2911,14 @@ async def sample_analyze_org_policies(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.analyze_org_policies] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.analyze_org_policies + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", request.scope),)), ) # Validate the universe domain. @@ -2819,16 +2946,19 @@ async def sample_analyze_org_policies(): # Done; return the response. return response - async def analyze_org_policy_governed_containers(self, - request: Optional[Union[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, dict]] = None, - *, - scope: Optional[str] = None, - constraint: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager: + async def analyze_org_policy_governed_containers( + self, + request: Optional[ + Union[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, dict] + ] = None, + *, + scope: Optional[str] = None, + constraint: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager: r"""Analyzes organization policies governed containers (projects, folders or organization) under a scope. @@ -2918,14 +3048,20 @@ async def sample_analyze_org_policy_governed_containers(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [scope, constraint, filter] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, asset_service.AnalyzeOrgPolicyGovernedContainersRequest): + if not isinstance( + request, asset_service.AnalyzeOrgPolicyGovernedContainersRequest + ): request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2939,14 +3075,14 @@ async def sample_analyze_org_policy_governed_containers(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.analyze_org_policy_governed_containers] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.analyze_org_policy_governed_containers + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", request.scope),)), ) # Validate the universe domain. @@ -2974,16 +3110,19 @@ async def sample_analyze_org_policy_governed_containers(): # Done; return the response. 
return response - async def analyze_org_policy_governed_assets(self, - request: Optional[Union[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, dict]] = None, - *, - scope: Optional[str] = None, - constraint: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager: + async def analyze_org_policy_governed_assets( + self, + request: Optional[ + Union[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, dict] + ] = None, + *, + scope: Optional[str] = None, + constraint: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager: r"""Analyzes organization policies governed assets (Google Cloud resources or policies) under a scope. This RPC supports custom constraints and the following 10 canned constraints: @@ -3102,10 +3241,14 @@ async def sample_analyze_org_policy_governed_assets(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [scope, constraint, filter] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3123,14 +3266,14 @@ async def sample_analyze_org_policy_governed_assets(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.analyze_org_policy_governed_assets] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.analyze_org_policy_governed_assets + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", request.scope),)), ) # Validate the universe domain. @@ -3200,8 +3343,7 @@ async def get_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -3209,7 +3351,11 @@ async def get_operation( # Send the request. response = await rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -3220,12 +3366,13 @@ async def __aenter__(self) -> "AssetServiceAsyncClient": async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "AssetServiceAsyncClient", -) +__all__ = ("AssetServiceAsyncClient",) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 340315ad899f..d91f30900f64 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -13,27 +13,38 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from collections import OrderedDict -from http import HTTPStatus import json import logging as std_logging import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast import warnings +from collections import OrderedDict +from http import HTTPStatus +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) -from google.cloud.asset_v1 import gapic_version as package_version - +import google.protobuf from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.asset_v1 import gapic_version as package_version +from google.oauth2 import service_account # type: ignore try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -42,23 +53,24 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) -from google.cloud.asset_v1.services.asset_service import pagers -from google.cloud.asset_v1.types import asset_service -from 
google.cloud.asset_v1.types import assets -from google.longrunning import operations_pb2 # type: ignore import google.api_core.operation as operation # type: ignore import google.api_core.operation_async as operation_async # type: ignore import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore import google.rpc.status_pb2 as status_pb2 # type: ignore import google.type.expr_pb2 as expr_pb2 # type: ignore -from .transports.base import AssetServiceTransport, DEFAULT_CLIENT_INFO +from google.cloud.asset_v1.services.asset_service import pagers +from google.cloud.asset_v1.types import asset_service, assets +from google.longrunning import operations_pb2 # type: ignore + +from .transports.base import DEFAULT_CLIENT_INFO, AssetServiceTransport from .transports.grpc import AssetServiceGrpcTransport from .transports.grpc_asyncio import AssetServiceGrpcAsyncIOTransport from .transports.rest import AssetServiceRestTransport @@ -71,14 +83,16 @@ class AssetServiceClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. """ + _transport_registry = OrderedDict() # type: Dict[str, Type[AssetServiceTransport]] _transport_registry["grpc"] = AssetServiceGrpcTransport _transport_registry["grpc_asyncio"] = AssetServiceGrpcAsyncIOTransport _transport_registry["rest"] = AssetServiceRestTransport - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[AssetServiceTransport]: + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[AssetServiceTransport]: """Returns an appropriate transport class. Args: @@ -154,14 +168,16 @@ def _use_client_cert_effective(): bool: whether client certificate should be used for mTLS Raises: ValueError: (If using a version of google-auth without should_use_client_cert and - GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) 
+ GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) """ # check if google-auth version supports should_use_client_cert for automatic mTLS enablement if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER return mtls.should_use_client_cert() - else: # pragma: NO COVER + else: # pragma: NO COVER # if unsupported, fallback to reading from env var - use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert_str = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() if use_client_cert_str not in ("true", "false"): raise ValueError( "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" @@ -200,8 +216,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: AssetServiceClient: The constructed client. """ - credentials = service_account.Credentials.from_service_account_file( - filename) + credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials return cls(*args, **kwargs) @@ -218,23 +233,36 @@ def transport(self) -> AssetServiceTransport: return self._transport @staticmethod - def access_level_path(access_policy: str,access_level: str,) -> str: + def access_level_path( + access_policy: str, + access_level: str, + ) -> str: """Returns a fully-qualified access_level string.""" - return "accessPolicies/{access_policy}/accessLevels/{access_level}".format(access_policy=access_policy, access_level=access_level, ) + return "accessPolicies/{access_policy}/accessLevels/{access_level}".format( + access_policy=access_policy, + access_level=access_level, + ) @staticmethod - def parse_access_level_path(path: str) -> Dict[str,str]: + def parse_access_level_path(path: str) -> Dict[str, str]: """Parses a access_level path into its component segments.""" - m = re.match(r"^accessPolicies/(?P.+?)/accessLevels/(?P.+?)$", path) + m = re.match( + r"^accessPolicies/(?P.+?)/accessLevels/(?P.+?)$", + path, + ) return 
m.groupdict() if m else {} @staticmethod - def access_policy_path(access_policy: str,) -> str: + def access_policy_path( + access_policy: str, + ) -> str: """Returns a fully-qualified access_policy string.""" - return "accessPolicies/{access_policy}".format(access_policy=access_policy, ) + return "accessPolicies/{access_policy}".format( + access_policy=access_policy, + ) @staticmethod - def parse_access_policy_path(path: str) -> Dict[str,str]: + def parse_access_policy_path(path: str) -> Dict[str, str]: """Parses a access_policy path into its component segments.""" m = re.match(r"^accessPolicies/(?P.+?)$", path) return m.groupdict() if m else {} @@ -245,112 +273,170 @@ def asset_path() -> str: return "*".format() @staticmethod - def parse_asset_path(path: str) -> Dict[str,str]: + def parse_asset_path(path: str) -> Dict[str, str]: """Parses a asset path into its component segments.""" m = re.match(r"^.*$", path) return m.groupdict() if m else {} @staticmethod - def feed_path(project: str,feed: str,) -> str: + def feed_path( + project: str, + feed: str, + ) -> str: """Returns a fully-qualified feed string.""" - return "projects/{project}/feeds/{feed}".format(project=project, feed=feed, ) + return "projects/{project}/feeds/{feed}".format( + project=project, + feed=feed, + ) @staticmethod - def parse_feed_path(path: str) -> Dict[str,str]: + def parse_feed_path(path: str) -> Dict[str, str]: """Parses a feed path into its component segments.""" m = re.match(r"^projects/(?P.+?)/feeds/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def inventory_path(project: str,location: str,instance: str,) -> str: + def inventory_path( + project: str, + location: str, + instance: str, + ) -> str: """Returns a fully-qualified inventory string.""" - return "projects/{project}/locations/{location}/instances/{instance}/inventory".format(project=project, location=location, instance=instance, ) + return 
"projects/{project}/locations/{location}/instances/{instance}/inventory".format( + project=project, + location=location, + instance=instance, + ) @staticmethod - def parse_inventory_path(path: str) -> Dict[str,str]: + def parse_inventory_path(path: str) -> Dict[str, str]: """Parses a inventory path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)/inventory$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)/inventory$", + path, + ) return m.groupdict() if m else {} @staticmethod - def saved_query_path(project: str,saved_query: str,) -> str: + def saved_query_path( + project: str, + saved_query: str, + ) -> str: """Returns a fully-qualified saved_query string.""" - return "projects/{project}/savedQueries/{saved_query}".format(project=project, saved_query=saved_query, ) + return "projects/{project}/savedQueries/{saved_query}".format( + project=project, + saved_query=saved_query, + ) @staticmethod - def parse_saved_query_path(path: str) -> Dict[str,str]: + def parse_saved_query_path(path: str) -> Dict[str, str]: """Parses a saved_query path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/savedQueries/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/savedQueries/(?P.+?)$", path + ) return m.groupdict() if m else {} @staticmethod - def service_perimeter_path(access_policy: str,service_perimeter: str,) -> str: + def service_perimeter_path( + access_policy: str, + service_perimeter: str, + ) -> str: """Returns a fully-qualified service_perimeter string.""" - return "accessPolicies/{access_policy}/servicePerimeters/{service_perimeter}".format(access_policy=access_policy, service_perimeter=service_perimeter, ) + return "accessPolicies/{access_policy}/servicePerimeters/{service_perimeter}".format( + access_policy=access_policy, + service_perimeter=service_perimeter, + ) @staticmethod - def parse_service_perimeter_path(path: str) -> Dict[str,str]: + def 
parse_service_perimeter_path(path: str) -> Dict[str, str]: """Parses a service_perimeter path into its component segments.""" - m = re.match(r"^accessPolicies/(?P.+?)/servicePerimeters/(?P.+?)$", path) + m = re.match( + r"^accessPolicies/(?P.+?)/servicePerimeters/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: + def parse_common_billing_account_path(path: str) -> Dict[str, str]: """Parse a billing_account path into its component segments.""" m = re.match(r"^billingAccounts/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str, ) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: + def parse_common_folder_path(path: str) -> Dict[str, str]: """Parse a folder path into its component segments.""" m = re.match(r"^folders/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str, ) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: + def 
parse_common_organization_path(path: str) -> Dict[str, str]: """Parse a organization path into its component segments.""" m = re.match(r"^organizations/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str, ) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) + return "projects/{project}".format( + project=project, + ) @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: + def parse_common_project_path(path: str) -> Dict[str, str]: """Parse a project path into its component segments.""" m = re.match(r"^projects/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str, ) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: + def parse_common_location_path(path: str) -> Dict[str, str]: """Parse a location path into its component segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): """Deprecated. Return the API endpoint and client cert source for mutual TLS. 
The client cert source is determined in the following order: @@ -382,14 +468,18 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = AssetServiceClient._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Figure out the client cert source to use. client_cert_source = None @@ -402,7 +492,9 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio # Figure out which api endpoint to use. 
if client_options.api_endpoint is not None: api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): api_endpoint = cls.DEFAULT_MTLS_ENDPOINT else: api_endpoint = cls.DEFAULT_ENDPOINT @@ -427,7 +519,9 @@ def _read_environment_variables(): use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) return use_client_cert, use_mtls_endpoint, universe_domain_env @staticmethod @@ -450,7 +544,9 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): return client_cert_source @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint) -> str: + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ) -> str: """Return the API endpoint used by the client. Args: @@ -466,17 +562,25 @@ def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtl """ if api_override is not None: api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): _default_universe = AssetServiceClient._DEFAULT_UNIVERSE if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." 
+ ) api_endpoint = AssetServiceClient.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + api_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) return api_endpoint @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: """Return the universe domain used by the client. Args: @@ -512,15 +616,18 @@ def _validate_universe_domain(self): return True def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError + self, error: core_exceptions.GoogleAPICallError ) -> None: """Adds credential info string to error details for 401/403/404 errors. Args: error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: return cred = self._transport._credentials @@ -553,12 +660,16 @@ def universe_domain(self) -> str: """ return self._universe_domain - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AssetServiceTransport, Callable[..., AssetServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, AssetServiceTransport, Callable[..., AssetServiceTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = 
DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the asset service client. Args: @@ -616,13 +727,21 @@ def __init__(self, *, self._client_options = client_options_lib.from_dict(self._client_options) if self._client_options is None: self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = AssetServiceClient._read_environment_variables() - self._client_cert_source = AssetServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = AssetServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ( + AssetServiceClient._read_environment_variables() + ) + self._client_cert_source = AssetServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = AssetServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) self._api_endpoint: str = "" # updated below, depending on `transport` # Initialize the universe domain validation. @@ -634,7 +753,9 @@ def __init__(self, *, api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. 
# Ordinarily, we provide the transport, but allowing a custom transport @@ -643,30 +764,37 @@ def __init__(self, *, if transport_provided: # transport is a AssetServiceTransport instance. if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) if self._client_options.scopes: raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." + "When providing a transport instance, provide its scopes directly." ) self._transport = cast(AssetServiceTransport, transport) self._api_endpoint = self._transport.host - self._api_endpoint = (self._api_endpoint or - AssetServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) + self._api_endpoint = self._api_endpoint or AssetServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) if not transport_provided: import google.auth._default # type: ignore - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) - transport_init: Union[Type[AssetServiceTransport], Callable[..., AssetServiceTransport]] = ( + transport_init: Union[ + Type[AssetServiceTransport], Callable[..., AssetServiceTransport] + ] = ( AssetServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AssetServiceTransport], transport) @@ -685,27 +813,36 @@ def __init__(self, *, ) if "async" not in 
str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.asset_v1.AssetServiceClient`.", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { "serviceName": "google.cloud.asset.v1.AssetService", "credentialsType": None, - } + }, ) - def export_assets(self, - request: Optional[Union[asset_service.ExportAssetsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def export_assets( + self, + request: Optional[Union[asset_service.ExportAssetsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Exports assets with time and resource types to a given Cloud Storage location/BigQuery table. For Cloud Storage location destinations, the output format is newline-delimited JSON. Each @@ -789,9 +926,7 @@ def sample_export_assets(): # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -816,14 +951,15 @@ def sample_export_assets(): # Done; return the response. return response - def list_assets(self, - request: Optional[Union[asset_service.ListAssetsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListAssetsPager: + def list_assets( + self, + request: Optional[Union[asset_service.ListAssetsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListAssetsPager: r"""Lists assets with time and resource types and returns paged results in response. @@ -890,10 +1026,14 @@ def sample_list_assets(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -911,9 +1051,7 @@ def sample_list_assets(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -941,13 +1079,16 @@ def sample_list_assets(): # Done; return the response. return response - def batch_get_assets_history(self, - request: Optional[Union[asset_service.BatchGetAssetsHistoryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.BatchGetAssetsHistoryResponse: + def batch_get_assets_history( + self, + request: Optional[ + Union[asset_service.BatchGetAssetsHistoryRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.BatchGetAssetsHistoryResponse: r"""Batch gets the update history of assets that overlap a time window. For IAM_POLICY content, this API outputs history when the asset and its attached IAM POLICY both exist. This can @@ -1010,9 +1151,7 @@ def sample_batch_get_assets_history(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1029,14 +1168,15 @@ def sample_batch_get_assets_history(): # Done; return the response. 
return response - def create_feed(self, - request: Optional[Union[asset_service.CreateFeedRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.Feed: + def create_feed( + self, + request: Optional[Union[asset_service.CreateFeedRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.Feed: r"""Creates a feed in a parent project/folder/organization to listen to its asset updates. @@ -1113,10 +1253,14 @@ def sample_create_feed(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1134,9 +1278,7 @@ def sample_create_feed(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. 
@@ -1153,14 +1295,15 @@ def sample_create_feed(): # Done; return the response. return response - def get_feed(self, - request: Optional[Union[asset_service.GetFeedRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.Feed: + def get_feed( + self, + request: Optional[Union[asset_service.GetFeedRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.Feed: r"""Gets details about an asset feed. .. code-block:: python @@ -1225,10 +1368,14 @@ def sample_get_feed(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1246,9 +1393,7 @@ def sample_get_feed(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. 
@@ -1265,14 +1410,15 @@ def sample_get_feed(): # Done; return the response. return response - def list_feeds(self, - request: Optional[Union[asset_service.ListFeedsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.ListFeedsResponse: + def list_feeds( + self, + request: Optional[Union[asset_service.ListFeedsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.ListFeedsResponse: r"""Lists all asset feeds in a parent project/folder/organization. @@ -1332,10 +1478,14 @@ def sample_list_feeds(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1353,9 +1503,7 @@ def sample_list_feeds(): # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1372,14 +1520,15 @@ def sample_list_feeds(): # Done; return the response. return response - def update_feed(self, - request: Optional[Union[asset_service.UpdateFeedRequest, dict]] = None, - *, - feed: Optional[asset_service.Feed] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.Feed: + def update_feed( + self, + request: Optional[Union[asset_service.UpdateFeedRequest, dict]] = None, + *, + feed: Optional[asset_service.Feed] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.Feed: r"""Updates an asset feed configuration. .. code-block:: python @@ -1448,10 +1597,14 @@ def sample_update_feed(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [feed] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -1469,9 +1622,9 @@ def sample_update_feed(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("feed.name", request.feed.name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("feed.name", request.feed.name),) + ), ) # Validate the universe domain. @@ -1488,14 +1641,15 @@ def sample_update_feed(): # Done; return the response. return response - def delete_feed(self, - request: Optional[Union[asset_service.DeleteFeedRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def delete_feed( + self, + request: Optional[Union[asset_service.DeleteFeedRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes an asset feed. .. code-block:: python @@ -1545,10 +1699,14 @@ def sample_delete_feed(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -1566,9 +1724,7 @@ def sample_delete_feed(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1582,16 +1738,17 @@ def sample_delete_feed(): metadata=metadata, ) - def search_all_resources(self, - request: Optional[Union[asset_service.SearchAllResourcesRequest, dict]] = None, - *, - scope: Optional[str] = None, - query: Optional[str] = None, - asset_types: Optional[MutableSequence[str]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.SearchAllResourcesPager: + def search_all_resources( + self, + request: Optional[Union[asset_service.SearchAllResourcesRequest, dict]] = None, + *, + scope: Optional[str] = None, + query: Optional[str] = None, + asset_types: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.SearchAllResourcesPager: r"""Searches all Google Cloud resources within the specified scope, such as a project, folder, or organization. The caller must be granted the ``cloudasset.assets.searchAllResources`` permission @@ -1759,10 +1916,14 @@ def sample_search_all_resources(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [scope, query, asset_types] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1784,9 +1945,7 @@ def sample_search_all_resources(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", request.scope),)), ) # Validate the universe domain. @@ -1814,15 +1973,18 @@ def sample_search_all_resources(): # Done; return the response. 
return response - def search_all_iam_policies(self, - request: Optional[Union[asset_service.SearchAllIamPoliciesRequest, dict]] = None, - *, - scope: Optional[str] = None, - query: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.SearchAllIamPoliciesPager: + def search_all_iam_policies( + self, + request: Optional[ + Union[asset_service.SearchAllIamPoliciesRequest, dict] + ] = None, + *, + scope: Optional[str] = None, + query: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.SearchAllIamPoliciesPager: r"""Searches all IAM policies within the specified scope, such as a project, folder, or organization. The caller must be granted the ``cloudasset.assets.searchAllIamPolicies`` permission on the @@ -1952,10 +2114,14 @@ def sample_search_all_iam_policies(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [scope, query] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1975,9 +2141,7 @@ def sample_search_all_iam_policies(): # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", request.scope),)), ) # Validate the universe domain. @@ -2005,13 +2169,14 @@ def sample_search_all_iam_policies(): # Done; return the response. return response - def analyze_iam_policy(self, - request: Optional[Union[asset_service.AnalyzeIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.AnalyzeIamPolicyResponse: + def analyze_iam_policy( + self, + request: Optional[Union[asset_service.AnalyzeIamPolicyRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.AnalyzeIamPolicyResponse: r"""Analyzes IAM policies to answer which identities have what accesses on which resources. @@ -2075,9 +2240,9 @@ def sample_analyze_iam_policy(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("analysis_query.scope", request.analysis_query.scope), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("analysis_query.scope", request.analysis_query.scope),) + ), ) # Validate the universe domain. @@ -2094,13 +2259,16 @@ def sample_analyze_iam_policy(): # Done; return the response. 
return response - def analyze_iam_policy_longrunning(self, - request: Optional[Union[asset_service.AnalyzeIamPolicyLongrunningRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def analyze_iam_policy_longrunning( + self, + request: Optional[ + Union[asset_service.AnalyzeIamPolicyLongrunningRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Analyzes IAM policies asynchronously to answer which identities have what accesses on which resources, and writes the analysis results to a Google Cloud Storage or a BigQuery destination. For @@ -2179,14 +2347,16 @@ def sample_analyze_iam_policy_longrunning(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.analyze_iam_policy_longrunning] + rpc = self._transport._wrapped_methods[ + self._transport.analyze_iam_policy_longrunning + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("analysis_query.scope", request.analysis_query.scope), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("analysis_query.scope", request.analysis_query.scope),) + ), ) # Validate the universe domain. @@ -2211,13 +2381,14 @@ def sample_analyze_iam_policy_longrunning(): # Done; return the response. 
return response - def analyze_move(self, - request: Optional[Union[asset_service.AnalyzeMoveRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.AnalyzeMoveResponse: + def analyze_move( + self, + request: Optional[Union[asset_service.AnalyzeMoveRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.AnalyzeMoveResponse: r"""Analyze moving a resource to a specified destination without kicking off the actual move. The analysis is best effort depending on the user's permissions of @@ -2284,9 +2455,7 @@ def sample_analyze_move(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), ) # Validate the universe domain. @@ -2303,13 +2472,14 @@ def sample_analyze_move(): # Done; return the response. return response - def query_assets(self, - request: Optional[Union[asset_service.QueryAssetsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.QueryAssetsResponse: + def query_assets( + self, + request: Optional[Union[asset_service.QueryAssetsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.QueryAssetsResponse: r"""Issue a job that queries assets using a SQL statement compatible with `BigQuery Standard SQL `__. 
@@ -2382,9 +2552,7 @@ def sample_query_assets(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2401,16 +2569,17 @@ def sample_query_assets(): # Done; return the response. return response - def create_saved_query(self, - request: Optional[Union[asset_service.CreateSavedQueryRequest, dict]] = None, - *, - parent: Optional[str] = None, - saved_query: Optional[asset_service.SavedQuery] = None, - saved_query_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.SavedQuery: + def create_saved_query( + self, + request: Optional[Union[asset_service.CreateSavedQueryRequest, dict]] = None, + *, + parent: Optional[str] = None, + saved_query: Optional[asset_service.SavedQuery] = None, + saved_query_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SavedQuery: r"""Creates a saved query in a parent project/folder/organization. @@ -2496,10 +2665,14 @@ def sample_create_saved_query(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [parent, saved_query, saved_query_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2521,9 +2694,7 @@ def sample_create_saved_query(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2540,14 +2711,15 @@ def sample_create_saved_query(): # Done; return the response. return response - def get_saved_query(self, - request: Optional[Union[asset_service.GetSavedQueryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.SavedQuery: + def get_saved_query( + self, + request: Optional[Union[asset_service.GetSavedQueryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SavedQuery: r"""Gets details about a saved query. .. 
code-block:: python @@ -2608,10 +2780,14 @@ def sample_get_saved_query(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2629,9 +2805,7 @@ def sample_get_saved_query(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2648,14 +2822,15 @@ def sample_get_saved_query(): # Done; return the response. 
return response - def list_saved_queries(self, - request: Optional[Union[asset_service.ListSavedQueriesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListSavedQueriesPager: + def list_saved_queries( + self, + request: Optional[Union[asset_service.ListSavedQueriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSavedQueriesPager: r"""Lists all saved queries in a parent project/folder/organization. @@ -2722,10 +2897,14 @@ def sample_list_saved_queries(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2743,9 +2922,7 @@ def sample_list_saved_queries(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. 
@@ -2773,15 +2950,16 @@ def sample_list_saved_queries(): # Done; return the response. return response - def update_saved_query(self, - request: Optional[Union[asset_service.UpdateSavedQueryRequest, dict]] = None, - *, - saved_query: Optional[asset_service.SavedQuery] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.SavedQuery: + def update_saved_query( + self, + request: Optional[Union[asset_service.UpdateSavedQueryRequest, dict]] = None, + *, + saved_query: Optional[asset_service.SavedQuery] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SavedQuery: r"""Updates a saved query. .. code-block:: python @@ -2850,10 +3028,14 @@ def sample_update_saved_query(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [saved_query, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -2873,9 +3055,9 @@ def sample_update_saved_query(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("saved_query.name", request.saved_query.name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("saved_query.name", request.saved_query.name),) + ), ) # Validate the universe domain. @@ -2892,14 +3074,15 @@ def sample_update_saved_query(): # Done; return the response. return response - def delete_saved_query(self, - request: Optional[Union[asset_service.DeleteSavedQueryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def delete_saved_query( + self, + request: Optional[Union[asset_service.DeleteSavedQueryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a saved query. .. code-block:: python @@ -2951,10 +3134,14 @@ def sample_delete_saved_query(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2972,9 +3159,7 @@ def sample_delete_saved_query(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2988,13 +3173,16 @@ def sample_delete_saved_query(): metadata=metadata, ) - def batch_get_effective_iam_policies(self, - request: Optional[Union[asset_service.BatchGetEffectiveIamPoliciesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: + def batch_get_effective_iam_policies( + self, + request: Optional[ + Union[asset_service.BatchGetEffectiveIamPoliciesRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: r"""Gets effective IAM policies for a batch of resources. .. code-block:: python @@ -3050,14 +3238,14 @@ def sample_batch_get_effective_iam_policies(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.batch_get_effective_iam_policies] + rpc = self._transport._wrapped_methods[ + self._transport.batch_get_effective_iam_policies + ] # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", request.scope),)), ) # Validate the universe domain. @@ -3074,16 +3262,17 @@ def sample_batch_get_effective_iam_policies(): # Done; return the response. return response - def analyze_org_policies(self, - request: Optional[Union[asset_service.AnalyzeOrgPoliciesRequest, dict]] = None, - *, - scope: Optional[str] = None, - constraint: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.AnalyzeOrgPoliciesPager: + def analyze_org_policies( + self, + request: Optional[Union[asset_service.AnalyzeOrgPoliciesRequest, dict]] = None, + *, + scope: Optional[str] = None, + constraint: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.AnalyzeOrgPoliciesPager: r"""Analyzes organization policies under a scope. .. code-block:: python @@ -3173,10 +3362,14 @@ def sample_analyze_org_policies(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [scope, constraint, filter] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3198,9 +3391,7 @@ def sample_analyze_org_policies(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", request.scope),)), ) # Validate the universe domain. @@ -3228,16 +3419,19 @@ def sample_analyze_org_policies(): # Done; return the response. return response - def analyze_org_policy_governed_containers(self, - request: Optional[Union[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, dict]] = None, - *, - scope: Optional[str] = None, - constraint: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.AnalyzeOrgPolicyGovernedContainersPager: + def analyze_org_policy_governed_containers( + self, + request: Optional[ + Union[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, dict] + ] = None, + *, + scope: Optional[str] = None, + constraint: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.AnalyzeOrgPolicyGovernedContainersPager: r"""Analyzes organization policies governed containers (projects, folders or organization) under a scope. @@ -3327,14 +3521,20 @@ def sample_analyze_org_policy_governed_containers(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [scope, constraint, filter] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance(request, asset_service.AnalyzeOrgPolicyGovernedContainersRequest): + if not isinstance( + request, asset_service.AnalyzeOrgPolicyGovernedContainersRequest + ): request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -3347,14 +3547,14 @@ def sample_analyze_org_policy_governed_containers(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.analyze_org_policy_governed_containers] + rpc = self._transport._wrapped_methods[ + self._transport.analyze_org_policy_governed_containers + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", request.scope),)), ) # Validate the universe domain. @@ -3382,16 +3582,19 @@ def sample_analyze_org_policy_governed_containers(): # Done; return the response. 
return response - def analyze_org_policy_governed_assets(self, - request: Optional[Union[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, dict]] = None, - *, - scope: Optional[str] = None, - constraint: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.AnalyzeOrgPolicyGovernedAssetsPager: + def analyze_org_policy_governed_assets( + self, + request: Optional[ + Union[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, dict] + ] = None, + *, + scope: Optional[str] = None, + constraint: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.AnalyzeOrgPolicyGovernedAssetsPager: r"""Analyzes organization policies governed assets (Google Cloud resources or policies) under a scope. This RPC supports custom constraints and the following 10 canned constraints: @@ -3510,10 +3713,14 @@ def sample_analyze_org_policy_governed_assets(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [scope, constraint, filter] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -3530,14 +3737,14 @@ def sample_analyze_org_policy_governed_assets(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.analyze_org_policy_governed_assets] + rpc = self._transport._wrapped_methods[ + self._transport.analyze_org_policy_governed_assets + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", request.scope),)), ) # Validate the universe domain. @@ -3620,8 +3827,7 @@ def get_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -3630,7 +3836,11 @@ def get_operation( try: # Send the request. response = rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -3639,18 +3849,11 @@ def get_operation( raise e - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "AssetServiceClient", -) +__all__ = ("AssetServiceClient",) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py index 5d14be97dd31..64466fe1186a 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py @@ -13,19 +13,32 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + from google.api_core import gapic_v1 from google.api_core import retry as retries from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.cloud.asset_v1.types import asset_service -from google.cloud.asset_v1.types import assets +from google.cloud.asset_v1.types import asset_service, assets class ListAssetsPager: @@ -45,14 +58,17 @@ class ListAssetsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., asset_service.ListAssetsResponse], - request: asset_service.ListAssetsRequest, - response: asset_service.ListAssetsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., asset_service.ListAssetsResponse], + request: asset_service.ListAssetsRequest, + response: asset_service.ListAssetsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. 
Args: @@ -85,7 +101,12 @@ def pages(self) -> Iterator[asset_service.ListAssetsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[assets.Asset]: @@ -93,7 +114,7 @@ def __iter__(self) -> Iterator[assets.Asset]: yield from page.assets def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListAssetsAsyncPager: @@ -113,14 +134,17 @@ class ListAssetsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[asset_service.ListAssetsResponse]], - request: asset_service.ListAssetsRequest, - response: asset_service.ListAssetsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[asset_service.ListAssetsResponse]], + request: asset_service.ListAssetsRequest, + response: asset_service.ListAssetsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. 
Args: @@ -153,8 +177,14 @@ async def pages(self) -> AsyncIterator[asset_service.ListAssetsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[assets.Asset]: async def async_generator(): async for page in self.pages: @@ -164,7 +194,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class SearchAllResourcesPager: @@ -184,14 +214,17 @@ class SearchAllResourcesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., asset_service.SearchAllResourcesResponse], - request: asset_service.SearchAllResourcesRequest, - response: asset_service.SearchAllResourcesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., asset_service.SearchAllResourcesResponse], + request: asset_service.SearchAllResourcesRequest, + response: asset_service.SearchAllResourcesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. 
Args: @@ -224,7 +257,12 @@ def pages(self) -> Iterator[asset_service.SearchAllResourcesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[assets.ResourceSearchResult]: @@ -232,7 +270,7 @@ def __iter__(self) -> Iterator[assets.ResourceSearchResult]: yield from page.results def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class SearchAllResourcesAsyncPager: @@ -252,14 +290,17 @@ class SearchAllResourcesAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[asset_service.SearchAllResourcesResponse]], - request: asset_service.SearchAllResourcesRequest, - response: asset_service.SearchAllResourcesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[asset_service.SearchAllResourcesResponse]], + request: asset_service.SearchAllResourcesRequest, + response: asset_service.SearchAllResourcesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. 
Args: @@ -292,8 +333,14 @@ async def pages(self) -> AsyncIterator[asset_service.SearchAllResourcesResponse] yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[assets.ResourceSearchResult]: async def async_generator(): async for page in self.pages: @@ -303,7 +350,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class SearchAllIamPoliciesPager: @@ -323,14 +370,17 @@ class SearchAllIamPoliciesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., asset_service.SearchAllIamPoliciesResponse], - request: asset_service.SearchAllIamPoliciesRequest, - response: asset_service.SearchAllIamPoliciesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., asset_service.SearchAllIamPoliciesResponse], + request: asset_service.SearchAllIamPoliciesRequest, + response: asset_service.SearchAllIamPoliciesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. 
Args: @@ -363,7 +413,12 @@ def pages(self) -> Iterator[asset_service.SearchAllIamPoliciesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[assets.IamPolicySearchResult]: @@ -371,7 +426,7 @@ def __iter__(self) -> Iterator[assets.IamPolicySearchResult]: yield from page.results def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class SearchAllIamPoliciesAsyncPager: @@ -391,14 +446,17 @@ class SearchAllIamPoliciesAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[asset_service.SearchAllIamPoliciesResponse]], - request: asset_service.SearchAllIamPoliciesRequest, - response: asset_service.SearchAllIamPoliciesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[asset_service.SearchAllIamPoliciesResponse]], + request: asset_service.SearchAllIamPoliciesRequest, + response: asset_service.SearchAllIamPoliciesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. 
Args: @@ -431,8 +489,14 @@ async def pages(self) -> AsyncIterator[asset_service.SearchAllIamPoliciesRespons yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[assets.IamPolicySearchResult]: async def async_generator(): async for page in self.pages: @@ -442,7 +506,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListSavedQueriesPager: @@ -462,14 +526,17 @@ class ListSavedQueriesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., asset_service.ListSavedQueriesResponse], - request: asset_service.ListSavedQueriesRequest, - response: asset_service.ListSavedQueriesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., asset_service.ListSavedQueriesResponse], + request: asset_service.ListSavedQueriesRequest, + response: asset_service.ListSavedQueriesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. 
Args: @@ -502,7 +569,12 @@ def pages(self) -> Iterator[asset_service.ListSavedQueriesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[asset_service.SavedQuery]: @@ -510,7 +582,7 @@ def __iter__(self) -> Iterator[asset_service.SavedQuery]: yield from page.saved_queries def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListSavedQueriesAsyncPager: @@ -530,14 +602,17 @@ class ListSavedQueriesAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[asset_service.ListSavedQueriesResponse]], - request: asset_service.ListSavedQueriesRequest, - response: asset_service.ListSavedQueriesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[asset_service.ListSavedQueriesResponse]], + request: asset_service.ListSavedQueriesRequest, + response: asset_service.ListSavedQueriesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. 
Args: @@ -570,8 +645,14 @@ async def pages(self) -> AsyncIterator[asset_service.ListSavedQueriesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[asset_service.SavedQuery]: async def async_generator(): async for page in self.pages: @@ -581,7 +662,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class AnalyzeOrgPoliciesPager: @@ -601,14 +682,17 @@ class AnalyzeOrgPoliciesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., asset_service.AnalyzeOrgPoliciesResponse], - request: asset_service.AnalyzeOrgPoliciesRequest, - response: asset_service.AnalyzeOrgPoliciesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., asset_service.AnalyzeOrgPoliciesResponse], + request: asset_service.AnalyzeOrgPoliciesRequest, + response: asset_service.AnalyzeOrgPoliciesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. 
Args: @@ -641,15 +725,22 @@ def pages(self) -> Iterator[asset_service.AnalyzeOrgPoliciesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response - def __iter__(self) -> Iterator[asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult]: + def __iter__( + self, + ) -> Iterator[asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult]: for page in self.pages: yield from page.org_policy_results def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class AnalyzeOrgPoliciesAsyncPager: @@ -669,14 +760,17 @@ class AnalyzeOrgPoliciesAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[asset_service.AnalyzeOrgPoliciesResponse]], - request: asset_service.AnalyzeOrgPoliciesRequest, - response: asset_service.AnalyzeOrgPoliciesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[asset_service.AnalyzeOrgPoliciesResponse]], + request: asset_service.AnalyzeOrgPoliciesRequest, + response: asset_service.AnalyzeOrgPoliciesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. 
Args: @@ -709,9 +803,17 @@ async def pages(self) -> AsyncIterator[asset_service.AnalyzeOrgPoliciesResponse] yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response - def __aiter__(self) -> AsyncIterator[asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult]: + + def __aiter__( + self, + ) -> AsyncIterator[asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult]: async def async_generator(): async for page in self.pages: for response in page.org_policy_results: @@ -720,7 +822,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class AnalyzeOrgPolicyGovernedContainersPager: @@ -740,14 +842,17 @@ class AnalyzeOrgPolicyGovernedContainersPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., asset_service.AnalyzeOrgPolicyGovernedContainersResponse], - request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, - response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., asset_service.AnalyzeOrgPolicyGovernedContainersResponse], + request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, + response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. Args: @@ -776,19 +881,30 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterator[asset_service.AnalyzeOrgPolicyGovernedContainersResponse]: + def pages( + self, + ) -> Iterator[asset_service.AnalyzeOrgPolicyGovernedContainersResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response - def __iter__(self) -> Iterator[asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer]: + def __iter__( + self, + ) -> Iterator[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer + ]: for page in self.pages: yield from page.governed_containers def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, 
self._response) class AnalyzeOrgPolicyGovernedContainersAsyncPager: @@ -808,14 +924,19 @@ class AnalyzeOrgPolicyGovernedContainersAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[asset_service.AnalyzeOrgPolicyGovernedContainersResponse]], - request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, - response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[ + ..., Awaitable[asset_service.AnalyzeOrgPolicyGovernedContainersResponse] + ], + request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, + response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. 
Args: @@ -844,13 +965,25 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedContainersResponse]: + async def pages( + self, + ) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedContainersResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response - def __aiter__(self) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer]: + + def __aiter__( + self, + ) -> AsyncIterator[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer + ]: async def async_generator(): async for page in self.pages: for response in page.governed_containers: @@ -859,7 +992,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class AnalyzeOrgPolicyGovernedAssetsPager: @@ -879,14 +1012,17 @@ class AnalyzeOrgPolicyGovernedAssetsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., asset_service.AnalyzeOrgPolicyGovernedAssetsResponse], - request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, - response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., asset_service.AnalyzeOrgPolicyGovernedAssetsResponse], + request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, + response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. Args: @@ -919,15 +1055,22 @@ def pages(self) -> Iterator[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response - def __iter__(self) -> Iterator[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset]: + def __iter__( + self, + ) -> Iterator[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset]: for page in self.pages: yield from page.governed_assets def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class AnalyzeOrgPolicyGovernedAssetsAsyncPager: @@ -947,14 +1090,19 @@ class AnalyzeOrgPolicyGovernedAssetsAsyncPager: attributes are available on the pager. 
If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]], - request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, - response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[ + ..., Awaitable[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse] + ], + request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, + response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. Args: @@ -983,13 +1131,25 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]: + async def pages( + self, + ) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response - def __aiter__(self) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset]: + + def __aiter__( + self, + ) -> AsyncIterator[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset + ]: async def async_generator(): async for page in self.pages: for 
response in page.governed_assets: @@ -998,4 +1158,4 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py index be001a49d9f4..2784fa6856ba 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py @@ -19,20 +19,18 @@ from .base import AssetServiceTransport from .grpc import AssetServiceGrpcTransport from .grpc_asyncio import AssetServiceGrpcAsyncIOTransport -from .rest import AssetServiceRestTransport -from .rest import AssetServiceRestInterceptor - +from .rest import AssetServiceRestInterceptor, AssetServiceRestTransport # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[AssetServiceTransport]] -_transport_registry['grpc'] = AssetServiceGrpcTransport -_transport_registry['grpc_asyncio'] = AssetServiceGrpcAsyncIOTransport -_transport_registry['rest'] = AssetServiceRestTransport +_transport_registry["grpc"] = AssetServiceGrpcTransport +_transport_registry["grpc_asyncio"] = AssetServiceGrpcAsyncIOTransport +_transport_registry["rest"] = AssetServiceRestTransport __all__ = ( - 'AssetServiceTransport', - 'AssetServiceGrpcTransport', - 'AssetServiceGrpcAsyncIOTransport', - 'AssetServiceRestTransport', - 'AssetServiceRestInterceptor', + "AssetServiceTransport", + "AssetServiceGrpcTransport", + "AssetServiceGrpcAsyncIOTransport", + "AssetServiceRestTransport", + "AssetServiceRestInterceptor", ) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index c910bb526406..815508d108a8 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -16,23 +16,22 @@ import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -from google.cloud.asset_v1 import gapic_version as package_version - -import google.auth # type: ignore import google.api_core +import google.auth # type: ignore +import google.protobuf +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, operations_v1 from google.api_core import retry as retries -from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore 
-from google.oauth2 import service_account # type: ignore -import google.protobuf - +from google.cloud.asset_v1 import gapic_version as package_version from google.cloud.asset_v1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore -import google.protobuf.empty_pb2 as empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ @@ -41,24 +40,23 @@ class AssetServiceTransport(abc.ABC): """Abstract transport class for AssetService.""" - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - DEFAULT_HOST: str = 'cloudasset.googleapis.com' + DEFAULT_HOST: str = "cloudasset.googleapis.com" def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: """Instantiate 
the transport. Args: @@ -97,31 +95,43 @@ def __init__( # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) if credentials_file is not None: credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - default_scopes=self.AUTH_SCOPES, - ) + credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(scopes=scopes, quota_project_id=quota_project_id, default_scopes=self.AUTH_SCOPES) + credentials, _ = google.auth.default( + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) # Don't apply audience if the credentials file passed from user. if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): credentials = credentials.with_always_use_jwt_access(True) # Save the credentials. self._credentials = credentials # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
- if ':' not in host: - host += ':443' + if ":" not in host: + host += ":443" self._host = host self._wrapped_methods: Dict[Callable, Callable] = {} @@ -322,14 +332,14 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - } + } def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! """ raise NotImplementedError() @@ -339,210 +349,248 @@ def operations_client(self): raise NotImplementedError() @property - def export_assets(self) -> Callable[ - [asset_service.ExportAssetsRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def export_assets( + self, + ) -> Callable[ + [asset_service.ExportAssetsRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def list_assets(self) -> Callable[ - [asset_service.ListAssetsRequest], - Union[ - asset_service.ListAssetsResponse, - Awaitable[asset_service.ListAssetsResponse] - ]]: + def list_assets( + self, + ) -> Callable[ + [asset_service.ListAssetsRequest], + Union[ + asset_service.ListAssetsResponse, + Awaitable[asset_service.ListAssetsResponse], + ], + ]: raise NotImplementedError() @property - def batch_get_assets_history(self) -> Callable[ - [asset_service.BatchGetAssetsHistoryRequest], - Union[ - asset_service.BatchGetAssetsHistoryResponse, - Awaitable[asset_service.BatchGetAssetsHistoryResponse] - ]]: + def batch_get_assets_history( + self, + ) -> Callable[ + [asset_service.BatchGetAssetsHistoryRequest], + Union[ + asset_service.BatchGetAssetsHistoryResponse, + Awaitable[asset_service.BatchGetAssetsHistoryResponse], + ], + ]: raise NotImplementedError() @property - def create_feed(self) -> 
Callable[ - [asset_service.CreateFeedRequest], - Union[ - asset_service.Feed, - Awaitable[asset_service.Feed] - ]]: + def create_feed( + self, + ) -> Callable[ + [asset_service.CreateFeedRequest], + Union[asset_service.Feed, Awaitable[asset_service.Feed]], + ]: raise NotImplementedError() @property - def get_feed(self) -> Callable[ - [asset_service.GetFeedRequest], - Union[ - asset_service.Feed, - Awaitable[asset_service.Feed] - ]]: + def get_feed( + self, + ) -> Callable[ + [asset_service.GetFeedRequest], + Union[asset_service.Feed, Awaitable[asset_service.Feed]], + ]: raise NotImplementedError() @property - def list_feeds(self) -> Callable[ - [asset_service.ListFeedsRequest], - Union[ - asset_service.ListFeedsResponse, - Awaitable[asset_service.ListFeedsResponse] - ]]: + def list_feeds( + self, + ) -> Callable[ + [asset_service.ListFeedsRequest], + Union[ + asset_service.ListFeedsResponse, Awaitable[asset_service.ListFeedsResponse] + ], + ]: raise NotImplementedError() @property - def update_feed(self) -> Callable[ - [asset_service.UpdateFeedRequest], - Union[ - asset_service.Feed, - Awaitable[asset_service.Feed] - ]]: + def update_feed( + self, + ) -> Callable[ + [asset_service.UpdateFeedRequest], + Union[asset_service.Feed, Awaitable[asset_service.Feed]], + ]: raise NotImplementedError() @property - def delete_feed(self) -> Callable[ - [asset_service.DeleteFeedRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def delete_feed( + self, + ) -> Callable[ + [asset_service.DeleteFeedRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: raise NotImplementedError() @property - def search_all_resources(self) -> Callable[ - [asset_service.SearchAllResourcesRequest], - Union[ - asset_service.SearchAllResourcesResponse, - Awaitable[asset_service.SearchAllResourcesResponse] - ]]: + def search_all_resources( + self, + ) -> Callable[ + [asset_service.SearchAllResourcesRequest], + Union[ + asset_service.SearchAllResourcesResponse, + 
Awaitable[asset_service.SearchAllResourcesResponse], + ], + ]: raise NotImplementedError() @property - def search_all_iam_policies(self) -> Callable[ - [asset_service.SearchAllIamPoliciesRequest], - Union[ - asset_service.SearchAllIamPoliciesResponse, - Awaitable[asset_service.SearchAllIamPoliciesResponse] - ]]: + def search_all_iam_policies( + self, + ) -> Callable[ + [asset_service.SearchAllIamPoliciesRequest], + Union[ + asset_service.SearchAllIamPoliciesResponse, + Awaitable[asset_service.SearchAllIamPoliciesResponse], + ], + ]: raise NotImplementedError() @property - def analyze_iam_policy(self) -> Callable[ - [asset_service.AnalyzeIamPolicyRequest], - Union[ - asset_service.AnalyzeIamPolicyResponse, - Awaitable[asset_service.AnalyzeIamPolicyResponse] - ]]: + def analyze_iam_policy( + self, + ) -> Callable[ + [asset_service.AnalyzeIamPolicyRequest], + Union[ + asset_service.AnalyzeIamPolicyResponse, + Awaitable[asset_service.AnalyzeIamPolicyResponse], + ], + ]: raise NotImplementedError() @property - def analyze_iam_policy_longrunning(self) -> Callable[ - [asset_service.AnalyzeIamPolicyLongrunningRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def analyze_iam_policy_longrunning( + self, + ) -> Callable[ + [asset_service.AnalyzeIamPolicyLongrunningRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def analyze_move(self) -> Callable[ - [asset_service.AnalyzeMoveRequest], - Union[ - asset_service.AnalyzeMoveResponse, - Awaitable[asset_service.AnalyzeMoveResponse] - ]]: + def analyze_move( + self, + ) -> Callable[ + [asset_service.AnalyzeMoveRequest], + Union[ + asset_service.AnalyzeMoveResponse, + Awaitable[asset_service.AnalyzeMoveResponse], + ], + ]: raise NotImplementedError() @property - def query_assets(self) -> Callable[ - [asset_service.QueryAssetsRequest], - Union[ - asset_service.QueryAssetsResponse, - 
Awaitable[asset_service.QueryAssetsResponse] - ]]: + def query_assets( + self, + ) -> Callable[ + [asset_service.QueryAssetsRequest], + Union[ + asset_service.QueryAssetsResponse, + Awaitable[asset_service.QueryAssetsResponse], + ], + ]: raise NotImplementedError() @property - def create_saved_query(self) -> Callable[ - [asset_service.CreateSavedQueryRequest], - Union[ - asset_service.SavedQuery, - Awaitable[asset_service.SavedQuery] - ]]: + def create_saved_query( + self, + ) -> Callable[ + [asset_service.CreateSavedQueryRequest], + Union[asset_service.SavedQuery, Awaitable[asset_service.SavedQuery]], + ]: raise NotImplementedError() @property - def get_saved_query(self) -> Callable[ - [asset_service.GetSavedQueryRequest], - Union[ - asset_service.SavedQuery, - Awaitable[asset_service.SavedQuery] - ]]: + def get_saved_query( + self, + ) -> Callable[ + [asset_service.GetSavedQueryRequest], + Union[asset_service.SavedQuery, Awaitable[asset_service.SavedQuery]], + ]: raise NotImplementedError() @property - def list_saved_queries(self) -> Callable[ - [asset_service.ListSavedQueriesRequest], - Union[ - asset_service.ListSavedQueriesResponse, - Awaitable[asset_service.ListSavedQueriesResponse] - ]]: + def list_saved_queries( + self, + ) -> Callable[ + [asset_service.ListSavedQueriesRequest], + Union[ + asset_service.ListSavedQueriesResponse, + Awaitable[asset_service.ListSavedQueriesResponse], + ], + ]: raise NotImplementedError() @property - def update_saved_query(self) -> Callable[ - [asset_service.UpdateSavedQueryRequest], - Union[ - asset_service.SavedQuery, - Awaitable[asset_service.SavedQuery] - ]]: + def update_saved_query( + self, + ) -> Callable[ + [asset_service.UpdateSavedQueryRequest], + Union[asset_service.SavedQuery, Awaitable[asset_service.SavedQuery]], + ]: raise NotImplementedError() @property - def delete_saved_query(self) -> Callable[ - [asset_service.DeleteSavedQueryRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def 
delete_saved_query( + self, + ) -> Callable[ + [asset_service.DeleteSavedQueryRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: raise NotImplementedError() @property - def batch_get_effective_iam_policies(self) -> Callable[ - [asset_service.BatchGetEffectiveIamPoliciesRequest], - Union[ - asset_service.BatchGetEffectiveIamPoliciesResponse, - Awaitable[asset_service.BatchGetEffectiveIamPoliciesResponse] - ]]: + def batch_get_effective_iam_policies( + self, + ) -> Callable[ + [asset_service.BatchGetEffectiveIamPoliciesRequest], + Union[ + asset_service.BatchGetEffectiveIamPoliciesResponse, + Awaitable[asset_service.BatchGetEffectiveIamPoliciesResponse], + ], + ]: raise NotImplementedError() @property - def analyze_org_policies(self) -> Callable[ - [asset_service.AnalyzeOrgPoliciesRequest], - Union[ - asset_service.AnalyzeOrgPoliciesResponse, - Awaitable[asset_service.AnalyzeOrgPoliciesResponse] - ]]: + def analyze_org_policies( + self, + ) -> Callable[ + [asset_service.AnalyzeOrgPoliciesRequest], + Union[ + asset_service.AnalyzeOrgPoliciesResponse, + Awaitable[asset_service.AnalyzeOrgPoliciesResponse], + ], + ]: raise NotImplementedError() @property - def analyze_org_policy_governed_containers(self) -> Callable[ - [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], - Union[ - asset_service.AnalyzeOrgPolicyGovernedContainersResponse, - Awaitable[asset_service.AnalyzeOrgPolicyGovernedContainersResponse] - ]]: + def analyze_org_policy_governed_containers( + self, + ) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], + Union[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse, + Awaitable[asset_service.AnalyzeOrgPolicyGovernedContainersResponse], + ], + ]: raise NotImplementedError() @property - def analyze_org_policy_governed_assets(self) -> Callable[ - [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], - Union[ - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, - 
Awaitable[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse] - ]]: + def analyze_org_policy_governed_assets( + self, + ) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], + Union[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, + Awaitable[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse], + ], + ]: raise NotImplementedError() @property @@ -559,6 +607,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ( - 'AssetServiceTransport', -) +__all__ = ("AssetServiceTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index 7fa1f751fabd..b970d4f9eb4f 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -19,25 +19,23 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson +import google.auth # type: ignore +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore import google.protobuf.message - import grpc # type: ignore import proto # type: ignore - +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore from google.cloud.asset_v1.types import asset_service -from 
google.longrunning import operations_pb2 # type: ignore -import google.protobuf.empty_pb2 as empty_pb2 # type: ignore -from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson + +from .base import DEFAULT_CLIENT_INFO, AssetServiceTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -47,7 +45,9 @@ class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) if logging_enabled: # pragma: NO COVER request_metadata = client_call_details.metadata if isinstance(request, proto.Message): @@ -68,7 +68,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -79,7 +79,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) result = response.result() if isinstance(result, proto.Message): response_payload = type(result).to_json(result) @@ -94,7 +98,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } 
_LOGGER.debug( f"Received response for {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": client_call_details.method, "response": grpc_response, @@ -116,23 +120,26 @@ class AssetServiceGrpcTransport(AssetServiceTransport): It sends protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. """ + _stubs: Dict[str, Callable] - def __init__(self, *, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "cloudasset.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: 
"""Instantiate the transport. Args: @@ -260,19 +267,23 @@ def __init__(self, *, ) self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod - def create_channel(cls, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: + def create_channel( + cls, + host: str = "cloudasset.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: """Create and return a gRPC channel object. Args: host (Optional[str]): The host for the channel to use. @@ -308,13 +319,12 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property @@ -334,9 +344,9 @@ def operations_client(self) -> operations_v1.OperationsClient: return self._operations_client @property - def export_assets(self) -> Callable[ - [asset_service.ExportAssetsRequest], - operations_pb2.Operation]: + def export_assets( + self, + ) -> Callable[[asset_service.ExportAssetsRequest], operations_pb2.Operation]: r"""Return a callable for the export assets method over gRPC. 
Exports assets with time and resource types to a given Cloud @@ -363,18 +373,18 @@ def export_assets(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'export_assets' not in self._stubs: - self._stubs['export_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ExportAssets', + if "export_assets" not in self._stubs: + self._stubs["export_assets"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/ExportAssets", request_serializer=asset_service.ExportAssetsRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['export_assets'] + return self._stubs["export_assets"] @property - def list_assets(self) -> Callable[ - [asset_service.ListAssetsRequest], - asset_service.ListAssetsResponse]: + def list_assets( + self, + ) -> Callable[[asset_service.ListAssetsRequest], asset_service.ListAssetsResponse]: r"""Return a callable for the list assets method over gRPC. Lists assets with time and resource types and returns @@ -390,18 +400,21 @@ def list_assets(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_assets' not in self._stubs: - self._stubs['list_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ListAssets', + if "list_assets" not in self._stubs: + self._stubs["list_assets"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/ListAssets", request_serializer=asset_service.ListAssetsRequest.serialize, response_deserializer=asset_service.ListAssetsResponse.deserialize, ) - return self._stubs['list_assets'] + return self._stubs["list_assets"] @property - def batch_get_assets_history(self) -> Callable[ - [asset_service.BatchGetAssetsHistoryRequest], - asset_service.BatchGetAssetsHistoryResponse]: + def batch_get_assets_history( + self, + ) -> Callable[ + [asset_service.BatchGetAssetsHistoryRequest], + asset_service.BatchGetAssetsHistoryResponse, + ]: r"""Return a callable for the batch get assets history method over gRPC. Batch gets the update history of assets that overlap a time @@ -422,18 +435,18 @@ def batch_get_assets_history(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'batch_get_assets_history' not in self._stubs: - self._stubs['batch_get_assets_history'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/BatchGetAssetsHistory', + if "batch_get_assets_history" not in self._stubs: + self._stubs["batch_get_assets_history"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/BatchGetAssetsHistory", request_serializer=asset_service.BatchGetAssetsHistoryRequest.serialize, response_deserializer=asset_service.BatchGetAssetsHistoryResponse.deserialize, ) - return self._stubs['batch_get_assets_history'] + return self._stubs["batch_get_assets_history"] @property - def create_feed(self) -> Callable[ - [asset_service.CreateFeedRequest], - asset_service.Feed]: + def create_feed( + self, + ) -> Callable[[asset_service.CreateFeedRequest], asset_service.Feed]: r"""Return a callable for the create feed method over gRPC. Creates a feed in a parent @@ -450,18 +463,16 @@ def create_feed(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_feed' not in self._stubs: - self._stubs['create_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/CreateFeed', + if "create_feed" not in self._stubs: + self._stubs["create_feed"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/CreateFeed", request_serializer=asset_service.CreateFeedRequest.serialize, response_deserializer=asset_service.Feed.deserialize, ) - return self._stubs['create_feed'] + return self._stubs["create_feed"] @property - def get_feed(self) -> Callable[ - [asset_service.GetFeedRequest], - asset_service.Feed]: + def get_feed(self) -> Callable[[asset_service.GetFeedRequest], asset_service.Feed]: r"""Return a callable for the get feed method over gRPC. Gets details about an asset feed. @@ -476,18 +487,18 @@ def get_feed(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_feed' not in self._stubs: - self._stubs['get_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/GetFeed', + if "get_feed" not in self._stubs: + self._stubs["get_feed"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/GetFeed", request_serializer=asset_service.GetFeedRequest.serialize, response_deserializer=asset_service.Feed.deserialize, ) - return self._stubs['get_feed'] + return self._stubs["get_feed"] @property - def list_feeds(self) -> Callable[ - [asset_service.ListFeedsRequest], - asset_service.ListFeedsResponse]: + def list_feeds( + self, + ) -> Callable[[asset_service.ListFeedsRequest], asset_service.ListFeedsResponse]: r"""Return a callable for the list feeds method over gRPC. Lists all asset feeds in a parent @@ -503,18 +514,18 @@ def list_feeds(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_feeds' not in self._stubs: - self._stubs['list_feeds'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ListFeeds', + if "list_feeds" not in self._stubs: + self._stubs["list_feeds"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/ListFeeds", request_serializer=asset_service.ListFeedsRequest.serialize, response_deserializer=asset_service.ListFeedsResponse.deserialize, ) - return self._stubs['list_feeds'] + return self._stubs["list_feeds"] @property - def update_feed(self) -> Callable[ - [asset_service.UpdateFeedRequest], - asset_service.Feed]: + def update_feed( + self, + ) -> Callable[[asset_service.UpdateFeedRequest], asset_service.Feed]: r"""Return a callable for the update feed method over gRPC. Updates an asset feed configuration. @@ -529,18 +540,18 @@ def update_feed(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_feed' not in self._stubs: - self._stubs['update_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/UpdateFeed', + if "update_feed" not in self._stubs: + self._stubs["update_feed"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/UpdateFeed", request_serializer=asset_service.UpdateFeedRequest.serialize, response_deserializer=asset_service.Feed.deserialize, ) - return self._stubs['update_feed'] + return self._stubs["update_feed"] @property - def delete_feed(self) -> Callable[ - [asset_service.DeleteFeedRequest], - empty_pb2.Empty]: + def delete_feed( + self, + ) -> Callable[[asset_service.DeleteFeedRequest], empty_pb2.Empty]: r"""Return a callable for the delete feed method over gRPC. Deletes an asset feed. @@ -555,18 +566,21 @@ def delete_feed(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_feed' not in self._stubs: - self._stubs['delete_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/DeleteFeed', + if "delete_feed" not in self._stubs: + self._stubs["delete_feed"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/DeleteFeed", request_serializer=asset_service.DeleteFeedRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_feed'] + return self._stubs["delete_feed"] @property - def search_all_resources(self) -> Callable[ - [asset_service.SearchAllResourcesRequest], - asset_service.SearchAllResourcesResponse]: + def search_all_resources( + self, + ) -> Callable[ + [asset_service.SearchAllResourcesRequest], + asset_service.SearchAllResourcesResponse, + ]: r"""Return a callable for the search all resources method over gRPC. 
Searches all Google Cloud resources within the specified scope, @@ -584,18 +598,21 @@ def search_all_resources(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'search_all_resources' not in self._stubs: - self._stubs['search_all_resources'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/SearchAllResources', + if "search_all_resources" not in self._stubs: + self._stubs["search_all_resources"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/SearchAllResources", request_serializer=asset_service.SearchAllResourcesRequest.serialize, response_deserializer=asset_service.SearchAllResourcesResponse.deserialize, ) - return self._stubs['search_all_resources'] + return self._stubs["search_all_resources"] @property - def search_all_iam_policies(self) -> Callable[ - [asset_service.SearchAllIamPoliciesRequest], - asset_service.SearchAllIamPoliciesResponse]: + def search_all_iam_policies( + self, + ) -> Callable[ + [asset_service.SearchAllIamPoliciesRequest], + asset_service.SearchAllIamPoliciesResponse, + ]: r"""Return a callable for the search all iam policies method over gRPC. Searches all IAM policies within the specified scope, such as a @@ -613,18 +630,20 @@ def search_all_iam_policies(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'search_all_iam_policies' not in self._stubs: - self._stubs['search_all_iam_policies'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/SearchAllIamPolicies', + if "search_all_iam_policies" not in self._stubs: + self._stubs["search_all_iam_policies"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/SearchAllIamPolicies", request_serializer=asset_service.SearchAllIamPoliciesRequest.serialize, response_deserializer=asset_service.SearchAllIamPoliciesResponse.deserialize, ) - return self._stubs['search_all_iam_policies'] + return self._stubs["search_all_iam_policies"] @property - def analyze_iam_policy(self) -> Callable[ - [asset_service.AnalyzeIamPolicyRequest], - asset_service.AnalyzeIamPolicyResponse]: + def analyze_iam_policy( + self, + ) -> Callable[ + [asset_service.AnalyzeIamPolicyRequest], asset_service.AnalyzeIamPolicyResponse + ]: r"""Return a callable for the analyze iam policy method over gRPC. Analyzes IAM policies to answer which identities have @@ -640,18 +659,20 @@ def analyze_iam_policy(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'analyze_iam_policy' not in self._stubs: - self._stubs['analyze_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeIamPolicy', + if "analyze_iam_policy" not in self._stubs: + self._stubs["analyze_iam_policy"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeIamPolicy", request_serializer=asset_service.AnalyzeIamPolicyRequest.serialize, response_deserializer=asset_service.AnalyzeIamPolicyResponse.deserialize, ) - return self._stubs['analyze_iam_policy'] + return self._stubs["analyze_iam_policy"] @property - def analyze_iam_policy_longrunning(self) -> Callable[ - [asset_service.AnalyzeIamPolicyLongrunningRequest], - operations_pb2.Operation]: + def analyze_iam_policy_longrunning( + self, + ) -> Callable[ + [asset_service.AnalyzeIamPolicyLongrunningRequest], operations_pb2.Operation + ]: r"""Return a callable for the analyze iam policy longrunning method over gRPC. Analyzes IAM policies asynchronously to answer which identities @@ -677,18 +698,22 @@ def analyze_iam_policy_longrunning(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'analyze_iam_policy_longrunning' not in self._stubs: - self._stubs['analyze_iam_policy_longrunning'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeIamPolicyLongrunning', - request_serializer=asset_service.AnalyzeIamPolicyLongrunningRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + if "analyze_iam_policy_longrunning" not in self._stubs: + self._stubs["analyze_iam_policy_longrunning"] = ( + self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeIamPolicyLongrunning", + request_serializer=asset_service.AnalyzeIamPolicyLongrunningRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) - return self._stubs['analyze_iam_policy_longrunning'] + return self._stubs["analyze_iam_policy_longrunning"] @property - def analyze_move(self) -> Callable[ - [asset_service.AnalyzeMoveRequest], - asset_service.AnalyzeMoveResponse]: + def analyze_move( + self, + ) -> Callable[ + [asset_service.AnalyzeMoveRequest], asset_service.AnalyzeMoveResponse + ]: r"""Return a callable for the analyze move method over gRPC. Analyze moving a resource to a specified destination @@ -709,18 +734,20 @@ def analyze_move(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'analyze_move' not in self._stubs: - self._stubs['analyze_move'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeMove', + if "analyze_move" not in self._stubs: + self._stubs["analyze_move"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeMove", request_serializer=asset_service.AnalyzeMoveRequest.serialize, response_deserializer=asset_service.AnalyzeMoveResponse.deserialize, ) - return self._stubs['analyze_move'] + return self._stubs["analyze_move"] @property - def query_assets(self) -> Callable[ - [asset_service.QueryAssetsRequest], - asset_service.QueryAssetsResponse]: + def query_assets( + self, + ) -> Callable[ + [asset_service.QueryAssetsRequest], asset_service.QueryAssetsResponse + ]: r"""Return a callable for the query assets method over gRPC. Issue a job that queries assets using a SQL statement compatible @@ -750,18 +777,18 @@ def query_assets(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'query_assets' not in self._stubs: - self._stubs['query_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/QueryAssets', + if "query_assets" not in self._stubs: + self._stubs["query_assets"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/QueryAssets", request_serializer=asset_service.QueryAssetsRequest.serialize, response_deserializer=asset_service.QueryAssetsResponse.deserialize, ) - return self._stubs['query_assets'] + return self._stubs["query_assets"] @property - def create_saved_query(self) -> Callable[ - [asset_service.CreateSavedQueryRequest], - asset_service.SavedQuery]: + def create_saved_query( + self, + ) -> Callable[[asset_service.CreateSavedQueryRequest], asset_service.SavedQuery]: r"""Return a callable for the create saved query method over gRPC. 
Creates a saved query in a parent @@ -777,18 +804,18 @@ def create_saved_query(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_saved_query' not in self._stubs: - self._stubs['create_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/CreateSavedQuery', + if "create_saved_query" not in self._stubs: + self._stubs["create_saved_query"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/CreateSavedQuery", request_serializer=asset_service.CreateSavedQueryRequest.serialize, response_deserializer=asset_service.SavedQuery.deserialize, ) - return self._stubs['create_saved_query'] + return self._stubs["create_saved_query"] @property - def get_saved_query(self) -> Callable[ - [asset_service.GetSavedQueryRequest], - asset_service.SavedQuery]: + def get_saved_query( + self, + ) -> Callable[[asset_service.GetSavedQueryRequest], asset_service.SavedQuery]: r"""Return a callable for the get saved query method over gRPC. Gets details about a saved query. @@ -803,18 +830,20 @@ def get_saved_query(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'get_saved_query' not in self._stubs: - self._stubs['get_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/GetSavedQuery', + if "get_saved_query" not in self._stubs: + self._stubs["get_saved_query"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/GetSavedQuery", request_serializer=asset_service.GetSavedQueryRequest.serialize, response_deserializer=asset_service.SavedQuery.deserialize, ) - return self._stubs['get_saved_query'] + return self._stubs["get_saved_query"] @property - def list_saved_queries(self) -> Callable[ - [asset_service.ListSavedQueriesRequest], - asset_service.ListSavedQueriesResponse]: + def list_saved_queries( + self, + ) -> Callable[ + [asset_service.ListSavedQueriesRequest], asset_service.ListSavedQueriesResponse + ]: r"""Return a callable for the list saved queries method over gRPC. Lists all saved queries in a parent @@ -830,18 +859,18 @@ def list_saved_queries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_saved_queries' not in self._stubs: - self._stubs['list_saved_queries'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ListSavedQueries', + if "list_saved_queries" not in self._stubs: + self._stubs["list_saved_queries"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/ListSavedQueries", request_serializer=asset_service.ListSavedQueriesRequest.serialize, response_deserializer=asset_service.ListSavedQueriesResponse.deserialize, ) - return self._stubs['list_saved_queries'] + return self._stubs["list_saved_queries"] @property - def update_saved_query(self) -> Callable[ - [asset_service.UpdateSavedQueryRequest], - asset_service.SavedQuery]: + def update_saved_query( + self, + ) -> Callable[[asset_service.UpdateSavedQueryRequest], asset_service.SavedQuery]: r"""Return a callable for the update saved query method over gRPC. 
Updates a saved query. @@ -856,18 +885,18 @@ def update_saved_query(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_saved_query' not in self._stubs: - self._stubs['update_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/UpdateSavedQuery', + if "update_saved_query" not in self._stubs: + self._stubs["update_saved_query"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/UpdateSavedQuery", request_serializer=asset_service.UpdateSavedQueryRequest.serialize, response_deserializer=asset_service.SavedQuery.deserialize, ) - return self._stubs['update_saved_query'] + return self._stubs["update_saved_query"] @property - def delete_saved_query(self) -> Callable[ - [asset_service.DeleteSavedQueryRequest], - empty_pb2.Empty]: + def delete_saved_query( + self, + ) -> Callable[[asset_service.DeleteSavedQueryRequest], empty_pb2.Empty]: r"""Return a callable for the delete saved query method over gRPC. Deletes a saved query. @@ -882,18 +911,21 @@ def delete_saved_query(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'delete_saved_query' not in self._stubs: - self._stubs['delete_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/DeleteSavedQuery', + if "delete_saved_query" not in self._stubs: + self._stubs["delete_saved_query"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/DeleteSavedQuery", request_serializer=asset_service.DeleteSavedQueryRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_saved_query'] + return self._stubs["delete_saved_query"] @property - def batch_get_effective_iam_policies(self) -> Callable[ - [asset_service.BatchGetEffectiveIamPoliciesRequest], - asset_service.BatchGetEffectiveIamPoliciesResponse]: + def batch_get_effective_iam_policies( + self, + ) -> Callable[ + [asset_service.BatchGetEffectiveIamPoliciesRequest], + asset_service.BatchGetEffectiveIamPoliciesResponse, + ]: r"""Return a callable for the batch get effective iam policies method over gRPC. @@ -909,18 +941,23 @@ def batch_get_effective_iam_policies(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'batch_get_effective_iam_policies' not in self._stubs: - self._stubs['batch_get_effective_iam_policies'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/BatchGetEffectiveIamPolicies', - request_serializer=asset_service.BatchGetEffectiveIamPoliciesRequest.serialize, - response_deserializer=asset_service.BatchGetEffectiveIamPoliciesResponse.deserialize, + if "batch_get_effective_iam_policies" not in self._stubs: + self._stubs["batch_get_effective_iam_policies"] = ( + self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/BatchGetEffectiveIamPolicies", + request_serializer=asset_service.BatchGetEffectiveIamPoliciesRequest.serialize, + response_deserializer=asset_service.BatchGetEffectiveIamPoliciesResponse.deserialize, + ) ) - return self._stubs['batch_get_effective_iam_policies'] + return self._stubs["batch_get_effective_iam_policies"] @property - def analyze_org_policies(self) -> Callable[ - [asset_service.AnalyzeOrgPoliciesRequest], - asset_service.AnalyzeOrgPoliciesResponse]: + def analyze_org_policies( + self, + ) -> Callable[ + [asset_service.AnalyzeOrgPoliciesRequest], + asset_service.AnalyzeOrgPoliciesResponse, + ]: r"""Return a callable for the analyze org policies method over gRPC. Analyzes organization policies under a scope. @@ -935,18 +972,21 @@ def analyze_org_policies(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'analyze_org_policies' not in self._stubs: - self._stubs['analyze_org_policies'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicies', + if "analyze_org_policies" not in self._stubs: + self._stubs["analyze_org_policies"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicies", request_serializer=asset_service.AnalyzeOrgPoliciesRequest.serialize, response_deserializer=asset_service.AnalyzeOrgPoliciesResponse.deserialize, ) - return self._stubs['analyze_org_policies'] + return self._stubs["analyze_org_policies"] @property - def analyze_org_policy_governed_containers(self) -> Callable[ - [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], - asset_service.AnalyzeOrgPolicyGovernedContainersResponse]: + def analyze_org_policy_governed_containers( + self, + ) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], + asset_service.AnalyzeOrgPolicyGovernedContainersResponse, + ]: r"""Return a callable for the analyze org policy governed containers method over gRPC. @@ -963,18 +1003,23 @@ def analyze_org_policy_governed_containers(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'analyze_org_policy_governed_containers' not in self._stubs: - self._stubs['analyze_org_policy_governed_containers'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedContainers', - request_serializer=asset_service.AnalyzeOrgPolicyGovernedContainersRequest.serialize, - response_deserializer=asset_service.AnalyzeOrgPolicyGovernedContainersResponse.deserialize, + if "analyze_org_policy_governed_containers" not in self._stubs: + self._stubs["analyze_org_policy_governed_containers"] = ( + self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedContainers", + request_serializer=asset_service.AnalyzeOrgPolicyGovernedContainersRequest.serialize, + response_deserializer=asset_service.AnalyzeOrgPolicyGovernedContainersResponse.deserialize, + ) ) - return self._stubs['analyze_org_policy_governed_containers'] + return self._stubs["analyze_org_policy_governed_containers"] @property - def analyze_org_policy_governed_assets(self) -> Callable[ - [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]: + def analyze_org_policy_governed_assets( + self, + ) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, + ]: r"""Return a callable for the analyze org policy governed assets method over gRPC. @@ -1008,13 +1053,15 @@ def analyze_org_policy_governed_assets(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'analyze_org_policy_governed_assets' not in self._stubs: - self._stubs['analyze_org_policy_governed_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedAssets', - request_serializer=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.serialize, - response_deserializer=asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.deserialize, + if "analyze_org_policy_governed_assets" not in self._stubs: + self._stubs["analyze_org_policy_governed_assets"] = ( + self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedAssets", + request_serializer=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.serialize, + response_deserializer=asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.deserialize, + ) ) - return self._stubs['analyze_org_policy_governed_assets'] + return self._stubs["analyze_org_policy_governed_assets"] def close(self): self._logged_channel.close() @@ -1023,8 +1070,7 @@ def close(self): def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
# gRPC handles serialization and deserialization, so we just need @@ -1042,6 +1088,4 @@ def kind(self) -> str: return "grpc" -__all__ = ( - 'AssetServiceGrpcTransport', -) +__all__ = ("AssetServiceGrpcTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index c5119687bbab..87b12c91f37b 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -15,33 +15,31 @@ # import inspect import json -import pickle import logging as std_logging +import pickle import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.api_core import retry_async as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.asset_v1.types import asset_service +from google.longrunning import operations_pb2 # type: ignore from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore from grpc.experimental import aio # type: ignore -from 
google.cloud.asset_v1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore -import google.protobuf.empty_pb2 as empty_pb2 # type: ignore -from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO +from .base import DEFAULT_CLIENT_INFO, AssetServiceTransport from .grpc import AssetServiceGrpcTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -49,9 +47,13 @@ _LOGGER = std_logging.getLogger(__name__) -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) if logging_enabled: # pragma: NO COVER request_metadata = client_call_details.metadata if isinstance(request, proto.Message): @@ -72,7 +74,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -83,7 +85,11 @@ async def intercept_unary_unary(self, continuation, client_call_details, request if logging_enabled: # pragma: NO COVER response_metadata = await response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) result = await response if isinstance(result, proto.Message): 
response_payload = type(result).to_json(result) @@ -98,7 +104,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Received response to rpc {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": str(client_call_details.method), "response": grpc_response, @@ -125,13 +131,15 @@ class AssetServiceGrpcAsyncIOTransport(AssetServiceTransport): _stubs: Dict[str, Callable] = {} @classmethod - def create_channel(cls, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: + def create_channel( + cls, + host: str = "cloudasset.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: host (Optional[str]): The host for the channel to use. 
@@ -162,24 +170,26 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) - def __init__(self, *, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "cloudasset.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. 
Args: @@ -309,7 +319,9 @@ def __init__(self, *, self._interceptor = _LoggingClientAIOInterceptor() self._grpc_channel._unary_unary_interceptors.append(self._interceptor) self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @@ -340,9 +352,11 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: return self._operations_client @property - def export_assets(self) -> Callable[ - [asset_service.ExportAssetsRequest], - Awaitable[operations_pb2.Operation]]: + def export_assets( + self, + ) -> Callable[ + [asset_service.ExportAssetsRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the export assets method over gRPC. Exports assets with time and resource types to a given Cloud @@ -369,18 +383,20 @@ def export_assets(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'export_assets' not in self._stubs: - self._stubs['export_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ExportAssets', + if "export_assets" not in self._stubs: + self._stubs["export_assets"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/ExportAssets", request_serializer=asset_service.ExportAssetsRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['export_assets'] + return self._stubs["export_assets"] @property - def list_assets(self) -> Callable[ - [asset_service.ListAssetsRequest], - Awaitable[asset_service.ListAssetsResponse]]: + def list_assets( + self, + ) -> Callable[ + [asset_service.ListAssetsRequest], Awaitable[asset_service.ListAssetsResponse] + ]: r"""Return a callable for the list assets method over gRPC. Lists assets with time and resource types and returns @@ -396,18 +412,21 @@ def list_assets(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_assets' not in self._stubs: - self._stubs['list_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ListAssets', + if "list_assets" not in self._stubs: + self._stubs["list_assets"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/ListAssets", request_serializer=asset_service.ListAssetsRequest.serialize, response_deserializer=asset_service.ListAssetsResponse.deserialize, ) - return self._stubs['list_assets'] + return self._stubs["list_assets"] @property - def batch_get_assets_history(self) -> Callable[ - [asset_service.BatchGetAssetsHistoryRequest], - Awaitable[asset_service.BatchGetAssetsHistoryResponse]]: + def batch_get_assets_history( + self, + ) -> Callable[ + [asset_service.BatchGetAssetsHistoryRequest], + Awaitable[asset_service.BatchGetAssetsHistoryResponse], + ]: r"""Return a callable for the batch get assets history method over gRPC. 
Batch gets the update history of assets that overlap a time @@ -428,18 +447,18 @@ def batch_get_assets_history(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'batch_get_assets_history' not in self._stubs: - self._stubs['batch_get_assets_history'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/BatchGetAssetsHistory', + if "batch_get_assets_history" not in self._stubs: + self._stubs["batch_get_assets_history"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/BatchGetAssetsHistory", request_serializer=asset_service.BatchGetAssetsHistoryRequest.serialize, response_deserializer=asset_service.BatchGetAssetsHistoryResponse.deserialize, ) - return self._stubs['batch_get_assets_history'] + return self._stubs["batch_get_assets_history"] @property - def create_feed(self) -> Callable[ - [asset_service.CreateFeedRequest], - Awaitable[asset_service.Feed]]: + def create_feed( + self, + ) -> Callable[[asset_service.CreateFeedRequest], Awaitable[asset_service.Feed]]: r"""Return a callable for the create feed method over gRPC. Creates a feed in a parent @@ -456,18 +475,18 @@ def create_feed(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'create_feed' not in self._stubs: - self._stubs['create_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/CreateFeed', + if "create_feed" not in self._stubs: + self._stubs["create_feed"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/CreateFeed", request_serializer=asset_service.CreateFeedRequest.serialize, response_deserializer=asset_service.Feed.deserialize, ) - return self._stubs['create_feed'] + return self._stubs["create_feed"] @property - def get_feed(self) -> Callable[ - [asset_service.GetFeedRequest], - Awaitable[asset_service.Feed]]: + def get_feed( + self, + ) -> Callable[[asset_service.GetFeedRequest], Awaitable[asset_service.Feed]]: r"""Return a callable for the get feed method over gRPC. Gets details about an asset feed. @@ -482,18 +501,20 @@ def get_feed(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_feed' not in self._stubs: - self._stubs['get_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/GetFeed', + if "get_feed" not in self._stubs: + self._stubs["get_feed"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/GetFeed", request_serializer=asset_service.GetFeedRequest.serialize, response_deserializer=asset_service.Feed.deserialize, ) - return self._stubs['get_feed'] + return self._stubs["get_feed"] @property - def list_feeds(self) -> Callable[ - [asset_service.ListFeedsRequest], - Awaitable[asset_service.ListFeedsResponse]]: + def list_feeds( + self, + ) -> Callable[ + [asset_service.ListFeedsRequest], Awaitable[asset_service.ListFeedsResponse] + ]: r"""Return a callable for the list feeds method over gRPC. Lists all asset feeds in a parent @@ -509,18 +530,18 @@ def list_feeds(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_feeds' not in self._stubs: - self._stubs['list_feeds'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ListFeeds', + if "list_feeds" not in self._stubs: + self._stubs["list_feeds"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/ListFeeds", request_serializer=asset_service.ListFeedsRequest.serialize, response_deserializer=asset_service.ListFeedsResponse.deserialize, ) - return self._stubs['list_feeds'] + return self._stubs["list_feeds"] @property - def update_feed(self) -> Callable[ - [asset_service.UpdateFeedRequest], - Awaitable[asset_service.Feed]]: + def update_feed( + self, + ) -> Callable[[asset_service.UpdateFeedRequest], Awaitable[asset_service.Feed]]: r"""Return a callable for the update feed method over gRPC. Updates an asset feed configuration. @@ -535,18 +556,18 @@ def update_feed(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_feed' not in self._stubs: - self._stubs['update_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/UpdateFeed', + if "update_feed" not in self._stubs: + self._stubs["update_feed"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/UpdateFeed", request_serializer=asset_service.UpdateFeedRequest.serialize, response_deserializer=asset_service.Feed.deserialize, ) - return self._stubs['update_feed'] + return self._stubs["update_feed"] @property - def delete_feed(self) -> Callable[ - [asset_service.DeleteFeedRequest], - Awaitable[empty_pb2.Empty]]: + def delete_feed( + self, + ) -> Callable[[asset_service.DeleteFeedRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete feed method over gRPC. Deletes an asset feed. @@ -561,18 +582,21 @@ def delete_feed(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'delete_feed' not in self._stubs: - self._stubs['delete_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/DeleteFeed', + if "delete_feed" not in self._stubs: + self._stubs["delete_feed"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/DeleteFeed", request_serializer=asset_service.DeleteFeedRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_feed'] + return self._stubs["delete_feed"] @property - def search_all_resources(self) -> Callable[ - [asset_service.SearchAllResourcesRequest], - Awaitable[asset_service.SearchAllResourcesResponse]]: + def search_all_resources( + self, + ) -> Callable[ + [asset_service.SearchAllResourcesRequest], + Awaitable[asset_service.SearchAllResourcesResponse], + ]: r"""Return a callable for the search all resources method over gRPC. Searches all Google Cloud resources within the specified scope, @@ -590,18 +614,21 @@ def search_all_resources(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'search_all_resources' not in self._stubs: - self._stubs['search_all_resources'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/SearchAllResources', + if "search_all_resources" not in self._stubs: + self._stubs["search_all_resources"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/SearchAllResources", request_serializer=asset_service.SearchAllResourcesRequest.serialize, response_deserializer=asset_service.SearchAllResourcesResponse.deserialize, ) - return self._stubs['search_all_resources'] + return self._stubs["search_all_resources"] @property - def search_all_iam_policies(self) -> Callable[ - [asset_service.SearchAllIamPoliciesRequest], - Awaitable[asset_service.SearchAllIamPoliciesResponse]]: + def search_all_iam_policies( + self, + ) -> Callable[ + [asset_service.SearchAllIamPoliciesRequest], + Awaitable[asset_service.SearchAllIamPoliciesResponse], + ]: r"""Return a callable for the search all iam policies method over gRPC. Searches all IAM policies within the specified scope, such as a @@ -619,18 +646,21 @@ def search_all_iam_policies(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'search_all_iam_policies' not in self._stubs: - self._stubs['search_all_iam_policies'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/SearchAllIamPolicies', + if "search_all_iam_policies" not in self._stubs: + self._stubs["search_all_iam_policies"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/SearchAllIamPolicies", request_serializer=asset_service.SearchAllIamPoliciesRequest.serialize, response_deserializer=asset_service.SearchAllIamPoliciesResponse.deserialize, ) - return self._stubs['search_all_iam_policies'] + return self._stubs["search_all_iam_policies"] @property - def analyze_iam_policy(self) -> Callable[ - [asset_service.AnalyzeIamPolicyRequest], - Awaitable[asset_service.AnalyzeIamPolicyResponse]]: + def analyze_iam_policy( + self, + ) -> Callable[ + [asset_service.AnalyzeIamPolicyRequest], + Awaitable[asset_service.AnalyzeIamPolicyResponse], + ]: r"""Return a callable for the analyze iam policy method over gRPC. Analyzes IAM policies to answer which identities have @@ -646,18 +676,21 @@ def analyze_iam_policy(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'analyze_iam_policy' not in self._stubs: - self._stubs['analyze_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeIamPolicy', + if "analyze_iam_policy" not in self._stubs: + self._stubs["analyze_iam_policy"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeIamPolicy", request_serializer=asset_service.AnalyzeIamPolicyRequest.serialize, response_deserializer=asset_service.AnalyzeIamPolicyResponse.deserialize, ) - return self._stubs['analyze_iam_policy'] + return self._stubs["analyze_iam_policy"] @property - def analyze_iam_policy_longrunning(self) -> Callable[ - [asset_service.AnalyzeIamPolicyLongrunningRequest], - Awaitable[operations_pb2.Operation]]: + def analyze_iam_policy_longrunning( + self, + ) -> Callable[ + [asset_service.AnalyzeIamPolicyLongrunningRequest], + Awaitable[operations_pb2.Operation], + ]: r"""Return a callable for the analyze iam policy longrunning method over gRPC. Analyzes IAM policies asynchronously to answer which identities @@ -683,18 +716,22 @@ def analyze_iam_policy_longrunning(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'analyze_iam_policy_longrunning' not in self._stubs: - self._stubs['analyze_iam_policy_longrunning'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeIamPolicyLongrunning', - request_serializer=asset_service.AnalyzeIamPolicyLongrunningRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, + if "analyze_iam_policy_longrunning" not in self._stubs: + self._stubs["analyze_iam_policy_longrunning"] = ( + self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeIamPolicyLongrunning", + request_serializer=asset_service.AnalyzeIamPolicyLongrunningRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) ) - return self._stubs['analyze_iam_policy_longrunning'] + return self._stubs["analyze_iam_policy_longrunning"] @property - def analyze_move(self) -> Callable[ - [asset_service.AnalyzeMoveRequest], - Awaitable[asset_service.AnalyzeMoveResponse]]: + def analyze_move( + self, + ) -> Callable[ + [asset_service.AnalyzeMoveRequest], Awaitable[asset_service.AnalyzeMoveResponse] + ]: r"""Return a callable for the analyze move method over gRPC. Analyze moving a resource to a specified destination @@ -715,18 +752,20 @@ def analyze_move(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'analyze_move' not in self._stubs: - self._stubs['analyze_move'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeMove', + if "analyze_move" not in self._stubs: + self._stubs["analyze_move"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeMove", request_serializer=asset_service.AnalyzeMoveRequest.serialize, response_deserializer=asset_service.AnalyzeMoveResponse.deserialize, ) - return self._stubs['analyze_move'] + return self._stubs["analyze_move"] @property - def query_assets(self) -> Callable[ - [asset_service.QueryAssetsRequest], - Awaitable[asset_service.QueryAssetsResponse]]: + def query_assets( + self, + ) -> Callable[ + [asset_service.QueryAssetsRequest], Awaitable[asset_service.QueryAssetsResponse] + ]: r"""Return a callable for the query assets method over gRPC. Issue a job that queries assets using a SQL statement compatible @@ -756,18 +795,20 @@ def query_assets(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'query_assets' not in self._stubs: - self._stubs['query_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/QueryAssets', + if "query_assets" not in self._stubs: + self._stubs["query_assets"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/QueryAssets", request_serializer=asset_service.QueryAssetsRequest.serialize, response_deserializer=asset_service.QueryAssetsResponse.deserialize, ) - return self._stubs['query_assets'] + return self._stubs["query_assets"] @property - def create_saved_query(self) -> Callable[ - [asset_service.CreateSavedQueryRequest], - Awaitable[asset_service.SavedQuery]]: + def create_saved_query( + self, + ) -> Callable[ + [asset_service.CreateSavedQueryRequest], Awaitable[asset_service.SavedQuery] + ]: r"""Return a callable for the create saved query method over gRPC. 
Creates a saved query in a parent @@ -783,18 +824,20 @@ def create_saved_query(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_saved_query' not in self._stubs: - self._stubs['create_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/CreateSavedQuery', + if "create_saved_query" not in self._stubs: + self._stubs["create_saved_query"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/CreateSavedQuery", request_serializer=asset_service.CreateSavedQueryRequest.serialize, response_deserializer=asset_service.SavedQuery.deserialize, ) - return self._stubs['create_saved_query'] + return self._stubs["create_saved_query"] @property - def get_saved_query(self) -> Callable[ - [asset_service.GetSavedQueryRequest], - Awaitable[asset_service.SavedQuery]]: + def get_saved_query( + self, + ) -> Callable[ + [asset_service.GetSavedQueryRequest], Awaitable[asset_service.SavedQuery] + ]: r"""Return a callable for the get saved query method over gRPC. Gets details about a saved query. @@ -809,18 +852,21 @@ def get_saved_query(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'get_saved_query' not in self._stubs: - self._stubs['get_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/GetSavedQuery', + if "get_saved_query" not in self._stubs: + self._stubs["get_saved_query"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/GetSavedQuery", request_serializer=asset_service.GetSavedQueryRequest.serialize, response_deserializer=asset_service.SavedQuery.deserialize, ) - return self._stubs['get_saved_query'] + return self._stubs["get_saved_query"] @property - def list_saved_queries(self) -> Callable[ - [asset_service.ListSavedQueriesRequest], - Awaitable[asset_service.ListSavedQueriesResponse]]: + def list_saved_queries( + self, + ) -> Callable[ + [asset_service.ListSavedQueriesRequest], + Awaitable[asset_service.ListSavedQueriesResponse], + ]: r"""Return a callable for the list saved queries method over gRPC. Lists all saved queries in a parent @@ -836,18 +882,20 @@ def list_saved_queries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_saved_queries' not in self._stubs: - self._stubs['list_saved_queries'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ListSavedQueries', + if "list_saved_queries" not in self._stubs: + self._stubs["list_saved_queries"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/ListSavedQueries", request_serializer=asset_service.ListSavedQueriesRequest.serialize, response_deserializer=asset_service.ListSavedQueriesResponse.deserialize, ) - return self._stubs['list_saved_queries'] + return self._stubs["list_saved_queries"] @property - def update_saved_query(self) -> Callable[ - [asset_service.UpdateSavedQueryRequest], - Awaitable[asset_service.SavedQuery]]: + def update_saved_query( + self, + ) -> Callable[ + [asset_service.UpdateSavedQueryRequest], Awaitable[asset_service.SavedQuery] + ]: r"""Return a callable for the update saved query method over gRPC. Updates a saved query. @@ -862,18 +910,18 @@ def update_saved_query(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'update_saved_query' not in self._stubs: - self._stubs['update_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/UpdateSavedQuery', + if "update_saved_query" not in self._stubs: + self._stubs["update_saved_query"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/UpdateSavedQuery", request_serializer=asset_service.UpdateSavedQueryRequest.serialize, response_deserializer=asset_service.SavedQuery.deserialize, ) - return self._stubs['update_saved_query'] + return self._stubs["update_saved_query"] @property - def delete_saved_query(self) -> Callable[ - [asset_service.DeleteSavedQueryRequest], - Awaitable[empty_pb2.Empty]]: + def delete_saved_query( + self, + ) -> Callable[[asset_service.DeleteSavedQueryRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete saved query method over gRPC. Deletes a saved query. @@ -888,18 +936,21 @@ def delete_saved_query(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'delete_saved_query' not in self._stubs: - self._stubs['delete_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/DeleteSavedQuery', + if "delete_saved_query" not in self._stubs: + self._stubs["delete_saved_query"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/DeleteSavedQuery", request_serializer=asset_service.DeleteSavedQueryRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_saved_query'] + return self._stubs["delete_saved_query"] @property - def batch_get_effective_iam_policies(self) -> Callable[ - [asset_service.BatchGetEffectiveIamPoliciesRequest], - Awaitable[asset_service.BatchGetEffectiveIamPoliciesResponse]]: + def batch_get_effective_iam_policies( + self, + ) -> Callable[ + [asset_service.BatchGetEffectiveIamPoliciesRequest], + Awaitable[asset_service.BatchGetEffectiveIamPoliciesResponse], + ]: r"""Return a callable for the batch get effective iam policies method over gRPC. @@ -915,18 +966,23 @@ def batch_get_effective_iam_policies(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'batch_get_effective_iam_policies' not in self._stubs: - self._stubs['batch_get_effective_iam_policies'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/BatchGetEffectiveIamPolicies', - request_serializer=asset_service.BatchGetEffectiveIamPoliciesRequest.serialize, - response_deserializer=asset_service.BatchGetEffectiveIamPoliciesResponse.deserialize, + if "batch_get_effective_iam_policies" not in self._stubs: + self._stubs["batch_get_effective_iam_policies"] = ( + self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/BatchGetEffectiveIamPolicies", + request_serializer=asset_service.BatchGetEffectiveIamPoliciesRequest.serialize, + response_deserializer=asset_service.BatchGetEffectiveIamPoliciesResponse.deserialize, + ) ) - return self._stubs['batch_get_effective_iam_policies'] + return self._stubs["batch_get_effective_iam_policies"] @property - def analyze_org_policies(self) -> Callable[ - [asset_service.AnalyzeOrgPoliciesRequest], - Awaitable[asset_service.AnalyzeOrgPoliciesResponse]]: + def analyze_org_policies( + self, + ) -> Callable[ + [asset_service.AnalyzeOrgPoliciesRequest], + Awaitable[asset_service.AnalyzeOrgPoliciesResponse], + ]: r"""Return a callable for the analyze org policies method over gRPC. Analyzes organization policies under a scope. @@ -941,18 +997,21 @@ def analyze_org_policies(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'analyze_org_policies' not in self._stubs: - self._stubs['analyze_org_policies'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicies', + if "analyze_org_policies" not in self._stubs: + self._stubs["analyze_org_policies"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicies", request_serializer=asset_service.AnalyzeOrgPoliciesRequest.serialize, response_deserializer=asset_service.AnalyzeOrgPoliciesResponse.deserialize, ) - return self._stubs['analyze_org_policies'] + return self._stubs["analyze_org_policies"] @property - def analyze_org_policy_governed_containers(self) -> Callable[ - [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], - Awaitable[asset_service.AnalyzeOrgPolicyGovernedContainersResponse]]: + def analyze_org_policy_governed_containers( + self, + ) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], + Awaitable[asset_service.AnalyzeOrgPolicyGovernedContainersResponse], + ]: r"""Return a callable for the analyze org policy governed containers method over gRPC. @@ -969,18 +1028,23 @@ def analyze_org_policy_governed_containers(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'analyze_org_policy_governed_containers' not in self._stubs: - self._stubs['analyze_org_policy_governed_containers'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedContainers', - request_serializer=asset_service.AnalyzeOrgPolicyGovernedContainersRequest.serialize, - response_deserializer=asset_service.AnalyzeOrgPolicyGovernedContainersResponse.deserialize, + if "analyze_org_policy_governed_containers" not in self._stubs: + self._stubs["analyze_org_policy_governed_containers"] = ( + self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedContainers", + request_serializer=asset_service.AnalyzeOrgPolicyGovernedContainersRequest.serialize, + response_deserializer=asset_service.AnalyzeOrgPolicyGovernedContainersResponse.deserialize, + ) ) - return self._stubs['analyze_org_policy_governed_containers'] + return self._stubs["analyze_org_policy_governed_containers"] @property - def analyze_org_policy_governed_assets(self) -> Callable[ - [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], - Awaitable[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]]: + def analyze_org_policy_governed_assets( + self, + ) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], + Awaitable[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse], + ]: r"""Return a callable for the analyze org policy governed assets method over gRPC. @@ -1014,16 +1078,18 @@ def analyze_org_policy_governed_assets(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'analyze_org_policy_governed_assets' not in self._stubs: - self._stubs['analyze_org_policy_governed_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedAssets', - request_serializer=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.serialize, - response_deserializer=asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.deserialize, + if "analyze_org_policy_governed_assets" not in self._stubs: + self._stubs["analyze_org_policy_governed_assets"] = ( + self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedAssets", + request_serializer=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.serialize, + response_deserializer=asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.deserialize, + ) ) - return self._stubs['analyze_org_policy_governed_assets'] + return self._stubs["analyze_org_policy_governed_assets"] def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { self.export_assets: self._wrap_method( self.export_assets, @@ -1232,8 +1298,7 @@ def kind(self) -> str: def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
# gRPC handles serialization and deserialization, so we just need @@ -1247,6 +1312,4 @@ def get_operation( return self._stubs["get_operation"] -__all__ = ( - 'AssetServiceGrpcAsyncIOTransport', -) +__all__ = ("AssetServiceGrpcAsyncIOTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index fb8b592dd771..ea31ba5d1daa 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -13,34 +13,26 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import logging +import dataclasses import json # type: ignore +import logging +import warnings +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore +import google.protobuf +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1, rest_helpers, rest_streaming from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 -import google.protobuf - -from google.protobuf import json_format -from google.api_core import operations_v1 - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - +from google.auth import credentials as ga_credentials # type: ignore 
+from google.auth.transport.requests import AuthorizedSession # type: ignore from google.cloud.asset_v1.types import asset_service -import google.protobuf.empty_pb2 as empty_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format +from requests import __version__ as requests_version - -from .rest_base import _BaseAssetServiceRestTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseAssetServiceRestTransport try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -49,6 +41,7 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -261,7 +254,14 @@ def post_update_saved_query(self, response): """ - def pre_analyze_iam_policy(self, request: asset_service.AnalyzeIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + + def pre_analyze_iam_policy( + self, + request: asset_service.AnalyzeIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.AnalyzeIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for analyze_iam_policy Override in a subclass to manipulate the request or metadata @@ -269,7 +269,9 @@ def pre_analyze_iam_policy(self, request: asset_service.AnalyzeIamPolicyRequest, """ return request, metadata - def post_analyze_iam_policy(self, response: asset_service.AnalyzeIamPolicyResponse) -> asset_service.AnalyzeIamPolicyResponse: + def post_analyze_iam_policy( + self, response: asset_service.AnalyzeIamPolicyResponse + ) -> asset_service.AnalyzeIamPolicyResponse: """Post-rpc interceptor for analyze_iam_policy DEPRECATED. 
Please use the `post_analyze_iam_policy_with_metadata` @@ -282,7 +284,13 @@ def post_analyze_iam_policy(self, response: asset_service.AnalyzeIamPolicyRespon """ return response - def post_analyze_iam_policy_with_metadata(self, response: asset_service.AnalyzeIamPolicyResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeIamPolicyResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_analyze_iam_policy_with_metadata( + self, + response: asset_service.AnalyzeIamPolicyResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.AnalyzeIamPolicyResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Post-rpc interceptor for analyze_iam_policy Override in a subclass to read or manipulate the response or metadata after it @@ -297,7 +305,14 @@ def post_analyze_iam_policy_with_metadata(self, response: asset_service.AnalyzeI """ return response, metadata - def pre_analyze_iam_policy_longrunning(self, request: asset_service.AnalyzeIamPolicyLongrunningRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeIamPolicyLongrunningRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_analyze_iam_policy_longrunning( + self, + request: asset_service.AnalyzeIamPolicyLongrunningRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.AnalyzeIamPolicyLongrunningRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for analyze_iam_policy_longrunning Override in a subclass to manipulate the request or metadata @@ -305,7 +320,9 @@ def pre_analyze_iam_policy_longrunning(self, request: asset_service.AnalyzeIamPo """ return request, metadata - def post_analyze_iam_policy_longrunning(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_analyze_iam_policy_longrunning( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for 
analyze_iam_policy_longrunning DEPRECATED. Please use the `post_analyze_iam_policy_longrunning_with_metadata` @@ -318,7 +335,11 @@ def post_analyze_iam_policy_longrunning(self, response: operations_pb2.Operation """ return response - def post_analyze_iam_policy_longrunning_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_analyze_iam_policy_longrunning_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for analyze_iam_policy_longrunning Override in a subclass to read or manipulate the response or metadata after it @@ -333,7 +354,13 @@ def post_analyze_iam_policy_longrunning_with_metadata(self, response: operations """ return response, metadata - def pre_analyze_move(self, request: asset_service.AnalyzeMoveRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeMoveRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_analyze_move( + self, + request: asset_service.AnalyzeMoveRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.AnalyzeMoveRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for analyze_move Override in a subclass to manipulate the request or metadata @@ -341,7 +368,9 @@ def pre_analyze_move(self, request: asset_service.AnalyzeMoveRequest, metadata: """ return request, metadata - def post_analyze_move(self, response: asset_service.AnalyzeMoveResponse) -> asset_service.AnalyzeMoveResponse: + def post_analyze_move( + self, response: asset_service.AnalyzeMoveResponse + ) -> asset_service.AnalyzeMoveResponse: """Post-rpc interceptor for analyze_move DEPRECATED. 
Please use the `post_analyze_move_with_metadata` @@ -354,7 +383,13 @@ def post_analyze_move(self, response: asset_service.AnalyzeMoveResponse) -> asse """ return response - def post_analyze_move_with_metadata(self, response: asset_service.AnalyzeMoveResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeMoveResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_analyze_move_with_metadata( + self, + response: asset_service.AnalyzeMoveResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.AnalyzeMoveResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Post-rpc interceptor for analyze_move Override in a subclass to read or manipulate the response or metadata after it @@ -369,7 +404,13 @@ def post_analyze_move_with_metadata(self, response: asset_service.AnalyzeMoveRes """ return response, metadata - def pre_analyze_org_policies(self, request: asset_service.AnalyzeOrgPoliciesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPoliciesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_analyze_org_policies( + self, + request: asset_service.AnalyzeOrgPoliciesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.AnalyzeOrgPoliciesRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for analyze_org_policies Override in a subclass to manipulate the request or metadata @@ -377,7 +418,9 @@ def pre_analyze_org_policies(self, request: asset_service.AnalyzeOrgPoliciesRequ """ return request, metadata - def post_analyze_org_policies(self, response: asset_service.AnalyzeOrgPoliciesResponse) -> asset_service.AnalyzeOrgPoliciesResponse: + def post_analyze_org_policies( + self, response: asset_service.AnalyzeOrgPoliciesResponse + ) -> asset_service.AnalyzeOrgPoliciesResponse: """Post-rpc interceptor for analyze_org_policies DEPRECATED. 
Please use the `post_analyze_org_policies_with_metadata` @@ -390,7 +433,14 @@ def post_analyze_org_policies(self, response: asset_service.AnalyzeOrgPoliciesRe """ return response - def post_analyze_org_policies_with_metadata(self, response: asset_service.AnalyzeOrgPoliciesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPoliciesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_analyze_org_policies_with_metadata( + self, + response: asset_service.AnalyzeOrgPoliciesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.AnalyzeOrgPoliciesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Post-rpc interceptor for analyze_org_policies Override in a subclass to read or manipulate the response or metadata after it @@ -405,7 +455,14 @@ def post_analyze_org_policies_with_metadata(self, response: asset_service.Analyz """ return response, metadata - def pre_analyze_org_policy_governed_assets(self, request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_analyze_org_policy_governed_assets( + self, + request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for analyze_org_policy_governed_assets Override in a subclass to manipulate the request or metadata @@ -413,7 +470,9 @@ def pre_analyze_org_policy_governed_assets(self, request: asset_service.AnalyzeO """ return request, metadata - def post_analyze_org_policy_governed_assets(self, response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse) -> asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: + def post_analyze_org_policy_governed_assets( + self, 
response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse + ) -> asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: """Post-rpc interceptor for analyze_org_policy_governed_assets DEPRECATED. Please use the `post_analyze_org_policy_governed_assets_with_metadata` @@ -426,7 +485,14 @@ def post_analyze_org_policy_governed_assets(self, response: asset_service.Analyz """ return response - def post_analyze_org_policy_governed_assets_with_metadata(self, response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_analyze_org_policy_governed_assets_with_metadata( + self, + response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Post-rpc interceptor for analyze_org_policy_governed_assets Override in a subclass to read or manipulate the response or metadata after it @@ -441,7 +507,14 @@ def post_analyze_org_policy_governed_assets_with_metadata(self, response: asset_ """ return response, metadata - def pre_analyze_org_policy_governed_containers(self, request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_analyze_org_policy_governed_containers( + self, + request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.AnalyzeOrgPolicyGovernedContainersRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for analyze_org_policy_governed_containers Override in a subclass to manipulate the request or metadata @@ -449,7 +522,9 @@ def 
pre_analyze_org_policy_governed_containers(self, request: asset_service.Anal """ return request, metadata - def post_analyze_org_policy_governed_containers(self, response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse) -> asset_service.AnalyzeOrgPolicyGovernedContainersResponse: + def post_analyze_org_policy_governed_containers( + self, response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse + ) -> asset_service.AnalyzeOrgPolicyGovernedContainersResponse: """Post-rpc interceptor for analyze_org_policy_governed_containers DEPRECATED. Please use the `post_analyze_org_policy_governed_containers_with_metadata` @@ -462,7 +537,14 @@ def post_analyze_org_policy_governed_containers(self, response: asset_service.An """ return response - def post_analyze_org_policy_governed_containers_with_metadata(self, response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedContainersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_analyze_org_policy_governed_containers_with_metadata( + self, + response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Post-rpc interceptor for analyze_org_policy_governed_containers Override in a subclass to read or manipulate the response or metadata after it @@ -477,7 +559,14 @@ def post_analyze_org_policy_governed_containers_with_metadata(self, response: as """ return response, metadata - def pre_batch_get_assets_history(self, request: asset_service.BatchGetAssetsHistoryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.BatchGetAssetsHistoryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_batch_get_assets_history( + self, + request: asset_service.BatchGetAssetsHistoryRequest, + 
metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.BatchGetAssetsHistoryRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for batch_get_assets_history Override in a subclass to manipulate the request or metadata @@ -485,7 +574,9 @@ def pre_batch_get_assets_history(self, request: asset_service.BatchGetAssetsHist """ return request, metadata - def post_batch_get_assets_history(self, response: asset_service.BatchGetAssetsHistoryResponse) -> asset_service.BatchGetAssetsHistoryResponse: + def post_batch_get_assets_history( + self, response: asset_service.BatchGetAssetsHistoryResponse + ) -> asset_service.BatchGetAssetsHistoryResponse: """Post-rpc interceptor for batch_get_assets_history DEPRECATED. Please use the `post_batch_get_assets_history_with_metadata` @@ -498,7 +589,14 @@ def post_batch_get_assets_history(self, response: asset_service.BatchGetAssetsHi """ return response - def post_batch_get_assets_history_with_metadata(self, response: asset_service.BatchGetAssetsHistoryResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.BatchGetAssetsHistoryResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_batch_get_assets_history_with_metadata( + self, + response: asset_service.BatchGetAssetsHistoryResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.BatchGetAssetsHistoryResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Post-rpc interceptor for batch_get_assets_history Override in a subclass to read or manipulate the response or metadata after it @@ -513,7 +611,14 @@ def post_batch_get_assets_history_with_metadata(self, response: asset_service.Ba """ return response, metadata - def pre_batch_get_effective_iam_policies(self, request: asset_service.BatchGetEffectiveIamPoliciesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.BatchGetEffectiveIamPoliciesRequest, Sequence[Tuple[str, Union[str, 
bytes]]]]: + def pre_batch_get_effective_iam_policies( + self, + request: asset_service.BatchGetEffectiveIamPoliciesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.BatchGetEffectiveIamPoliciesRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for batch_get_effective_iam_policies Override in a subclass to manipulate the request or metadata @@ -521,7 +626,9 @@ def pre_batch_get_effective_iam_policies(self, request: asset_service.BatchGetEf """ return request, metadata - def post_batch_get_effective_iam_policies(self, response: asset_service.BatchGetEffectiveIamPoliciesResponse) -> asset_service.BatchGetEffectiveIamPoliciesResponse: + def post_batch_get_effective_iam_policies( + self, response: asset_service.BatchGetEffectiveIamPoliciesResponse + ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: """Post-rpc interceptor for batch_get_effective_iam_policies DEPRECATED. Please use the `post_batch_get_effective_iam_policies_with_metadata` @@ -534,7 +641,14 @@ def post_batch_get_effective_iam_policies(self, response: asset_service.BatchGet """ return response - def post_batch_get_effective_iam_policies_with_metadata(self, response: asset_service.BatchGetEffectiveIamPoliciesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.BatchGetEffectiveIamPoliciesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_batch_get_effective_iam_policies_with_metadata( + self, + response: asset_service.BatchGetEffectiveIamPoliciesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.BatchGetEffectiveIamPoliciesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Post-rpc interceptor for batch_get_effective_iam_policies Override in a subclass to read or manipulate the response or metadata after it @@ -549,7 +663,13 @@ def post_batch_get_effective_iam_policies_with_metadata(self, response: asset_se """ return response, metadata 
- def pre_create_feed(self, request: asset_service.CreateFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.CreateFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_create_feed( + self, + request: asset_service.CreateFeedRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.CreateFeedRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for create_feed Override in a subclass to manipulate the request or metadata @@ -570,7 +690,11 @@ def post_create_feed(self, response: asset_service.Feed) -> asset_service.Feed: """ return response - def post_create_feed_with_metadata(self, response: asset_service.Feed, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_create_feed_with_metadata( + self, + response: asset_service.Feed, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for create_feed Override in a subclass to read or manipulate the response or metadata after it @@ -585,7 +709,13 @@ def post_create_feed_with_metadata(self, response: asset_service.Feed, metadata: """ return response, metadata - def pre_create_saved_query(self, request: asset_service.CreateSavedQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.CreateSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_create_saved_query( + self, + request: asset_service.CreateSavedQueryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.CreateSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for create_saved_query Override in a subclass to manipulate the request or metadata @@ -593,7 +723,9 @@ def pre_create_saved_query(self, request: asset_service.CreateSavedQueryRequest, """ return request, 
metadata - def post_create_saved_query(self, response: asset_service.SavedQuery) -> asset_service.SavedQuery: + def post_create_saved_query( + self, response: asset_service.SavedQuery + ) -> asset_service.SavedQuery: """Post-rpc interceptor for create_saved_query DEPRECATED. Please use the `post_create_saved_query_with_metadata` @@ -606,7 +738,11 @@ def post_create_saved_query(self, response: asset_service.SavedQuery) -> asset_s """ return response - def post_create_saved_query_with_metadata(self, response: asset_service.SavedQuery, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SavedQuery, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_create_saved_query_with_metadata( + self, + response: asset_service.SavedQuery, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.SavedQuery, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for create_saved_query Override in a subclass to read or manipulate the response or metadata after it @@ -621,7 +757,13 @@ def post_create_saved_query_with_metadata(self, response: asset_service.SavedQue """ return response, metadata - def pre_delete_feed(self, request: asset_service.DeleteFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.DeleteFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_delete_feed( + self, + request: asset_service.DeleteFeedRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.DeleteFeedRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_feed Override in a subclass to manipulate the request or metadata @@ -629,7 +771,13 @@ def pre_delete_feed(self, request: asset_service.DeleteFeedRequest, metadata: Se """ return request, metadata - def pre_delete_saved_query(self, request: asset_service.DeleteSavedQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.DeleteSavedQueryRequest, 
Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_delete_saved_query( + self, + request: asset_service.DeleteSavedQueryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.DeleteSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_saved_query Override in a subclass to manipulate the request or metadata @@ -637,7 +785,13 @@ def pre_delete_saved_query(self, request: asset_service.DeleteSavedQueryRequest, """ return request, metadata - def pre_export_assets(self, request: asset_service.ExportAssetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ExportAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_export_assets( + self, + request: asset_service.ExportAssetsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.ExportAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for export_assets Override in a subclass to manipulate the request or metadata @@ -645,7 +799,9 @@ def pre_export_assets(self, request: asset_service.ExportAssetsRequest, metadata """ return request, metadata - def post_export_assets(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_export_assets( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for export_assets DEPRECATED. 
Please use the `post_export_assets_with_metadata` @@ -658,7 +814,11 @@ def post_export_assets(self, response: operations_pb2.Operation) -> operations_p """ return response - def post_export_assets_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_export_assets_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for export_assets Override in a subclass to read or manipulate the response or metadata after it @@ -673,7 +833,11 @@ def post_export_assets_with_metadata(self, response: operations_pb2.Operation, m """ return response, metadata - def pre_get_feed(self, request: asset_service.GetFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.GetFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_get_feed( + self, + request: asset_service.GetFeedRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.GetFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_feed Override in a subclass to manipulate the request or metadata @@ -694,7 +858,11 @@ def post_get_feed(self, response: asset_service.Feed) -> asset_service.Feed: """ return response - def post_get_feed_with_metadata(self, response: asset_service.Feed, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_get_feed_with_metadata( + self, + response: asset_service.Feed, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for get_feed Override in a subclass to read or manipulate the response or metadata after it @@ -709,7 
+877,13 @@ def post_get_feed_with_metadata(self, response: asset_service.Feed, metadata: Se """ return response, metadata - def pre_get_saved_query(self, request: asset_service.GetSavedQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.GetSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_get_saved_query( + self, + request: asset_service.GetSavedQueryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.GetSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_saved_query Override in a subclass to manipulate the request or metadata @@ -717,7 +891,9 @@ def pre_get_saved_query(self, request: asset_service.GetSavedQueryRequest, metad """ return request, metadata - def post_get_saved_query(self, response: asset_service.SavedQuery) -> asset_service.SavedQuery: + def post_get_saved_query( + self, response: asset_service.SavedQuery + ) -> asset_service.SavedQuery: """Post-rpc interceptor for get_saved_query DEPRECATED. 
Please use the `post_get_saved_query_with_metadata` @@ -730,7 +906,11 @@ def post_get_saved_query(self, response: asset_service.SavedQuery) -> asset_serv """ return response - def post_get_saved_query_with_metadata(self, response: asset_service.SavedQuery, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SavedQuery, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_get_saved_query_with_metadata( + self, + response: asset_service.SavedQuery, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.SavedQuery, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for get_saved_query Override in a subclass to read or manipulate the response or metadata after it @@ -745,7 +925,13 @@ def post_get_saved_query_with_metadata(self, response: asset_service.SavedQuery, """ return response, metadata - def pre_list_assets(self, request: asset_service.ListAssetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_list_assets( + self, + request: asset_service.ListAssetsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.ListAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_assets Override in a subclass to manipulate the request or metadata @@ -753,7 +939,9 @@ def pre_list_assets(self, request: asset_service.ListAssetsRequest, metadata: Se """ return request, metadata - def post_list_assets(self, response: asset_service.ListAssetsResponse) -> asset_service.ListAssetsResponse: + def post_list_assets( + self, response: asset_service.ListAssetsResponse + ) -> asset_service.ListAssetsResponse: """Post-rpc interceptor for list_assets DEPRECATED. 
Please use the `post_list_assets_with_metadata` @@ -766,7 +954,13 @@ def post_list_assets(self, response: asset_service.ListAssetsResponse) -> asset_ """ return response - def post_list_assets_with_metadata(self, response: asset_service.ListAssetsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_list_assets_with_metadata( + self, + response: asset_service.ListAssetsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.ListAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Post-rpc interceptor for list_assets Override in a subclass to read or manipulate the response or metadata after it @@ -781,7 +975,11 @@ def post_list_assets_with_metadata(self, response: asset_service.ListAssetsRespo """ return response, metadata - def pre_list_feeds(self, request: asset_service.ListFeedsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListFeedsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_list_feeds( + self, + request: asset_service.ListFeedsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.ListFeedsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_feeds Override in a subclass to manipulate the request or metadata @@ -789,7 +987,9 @@ def pre_list_feeds(self, request: asset_service.ListFeedsRequest, metadata: Sequ """ return request, metadata - def post_list_feeds(self, response: asset_service.ListFeedsResponse) -> asset_service.ListFeedsResponse: + def post_list_feeds( + self, response: asset_service.ListFeedsResponse + ) -> asset_service.ListFeedsResponse: """Post-rpc interceptor for list_feeds DEPRECATED. 
Please use the `post_list_feeds_with_metadata` @@ -802,7 +1002,13 @@ def post_list_feeds(self, response: asset_service.ListFeedsResponse) -> asset_se """ return response - def post_list_feeds_with_metadata(self, response: asset_service.ListFeedsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListFeedsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_list_feeds_with_metadata( + self, + response: asset_service.ListFeedsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.ListFeedsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Post-rpc interceptor for list_feeds Override in a subclass to read or manipulate the response or metadata after it @@ -817,7 +1023,13 @@ def post_list_feeds_with_metadata(self, response: asset_service.ListFeedsRespons """ return response, metadata - def pre_list_saved_queries(self, request: asset_service.ListSavedQueriesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListSavedQueriesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_list_saved_queries( + self, + request: asset_service.ListSavedQueriesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.ListSavedQueriesRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_saved_queries Override in a subclass to manipulate the request or metadata @@ -825,7 +1037,9 @@ def pre_list_saved_queries(self, request: asset_service.ListSavedQueriesRequest, """ return request, metadata - def post_list_saved_queries(self, response: asset_service.ListSavedQueriesResponse) -> asset_service.ListSavedQueriesResponse: + def post_list_saved_queries( + self, response: asset_service.ListSavedQueriesResponse + ) -> asset_service.ListSavedQueriesResponse: """Post-rpc interceptor for list_saved_queries DEPRECATED. 
Please use the `post_list_saved_queries_with_metadata` @@ -838,7 +1052,13 @@ def post_list_saved_queries(self, response: asset_service.ListSavedQueriesRespon """ return response - def post_list_saved_queries_with_metadata(self, response: asset_service.ListSavedQueriesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListSavedQueriesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_list_saved_queries_with_metadata( + self, + response: asset_service.ListSavedQueriesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.ListSavedQueriesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Post-rpc interceptor for list_saved_queries Override in a subclass to read or manipulate the response or metadata after it @@ -853,7 +1073,13 @@ def post_list_saved_queries_with_metadata(self, response: asset_service.ListSave """ return response, metadata - def pre_query_assets(self, request: asset_service.QueryAssetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.QueryAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_query_assets( + self, + request: asset_service.QueryAssetsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.QueryAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for query_assets Override in a subclass to manipulate the request or metadata @@ -861,7 +1087,9 @@ def pre_query_assets(self, request: asset_service.QueryAssetsRequest, metadata: """ return request, metadata - def post_query_assets(self, response: asset_service.QueryAssetsResponse) -> asset_service.QueryAssetsResponse: + def post_query_assets( + self, response: asset_service.QueryAssetsResponse + ) -> asset_service.QueryAssetsResponse: """Post-rpc interceptor for query_assets DEPRECATED. 
Please use the `post_query_assets_with_metadata` @@ -874,7 +1102,13 @@ def post_query_assets(self, response: asset_service.QueryAssetsResponse) -> asse """ return response - def post_query_assets_with_metadata(self, response: asset_service.QueryAssetsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.QueryAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_query_assets_with_metadata( + self, + response: asset_service.QueryAssetsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.QueryAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Post-rpc interceptor for query_assets Override in a subclass to read or manipulate the response or metadata after it @@ -889,7 +1123,14 @@ def post_query_assets_with_metadata(self, response: asset_service.QueryAssetsRes """ return response, metadata - def pre_search_all_iam_policies(self, request: asset_service.SearchAllIamPoliciesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SearchAllIamPoliciesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_search_all_iam_policies( + self, + request: asset_service.SearchAllIamPoliciesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.SearchAllIamPoliciesRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for search_all_iam_policies Override in a subclass to manipulate the request or metadata @@ -897,7 +1138,9 @@ def pre_search_all_iam_policies(self, request: asset_service.SearchAllIamPolicie """ return request, metadata - def post_search_all_iam_policies(self, response: asset_service.SearchAllIamPoliciesResponse) -> asset_service.SearchAllIamPoliciesResponse: + def post_search_all_iam_policies( + self, response: asset_service.SearchAllIamPoliciesResponse + ) -> asset_service.SearchAllIamPoliciesResponse: """Post-rpc interceptor for search_all_iam_policies DEPRECATED. 
Please use the `post_search_all_iam_policies_with_metadata` @@ -910,7 +1153,14 @@ def post_search_all_iam_policies(self, response: asset_service.SearchAllIamPolic """ return response - def post_search_all_iam_policies_with_metadata(self, response: asset_service.SearchAllIamPoliciesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SearchAllIamPoliciesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_search_all_iam_policies_with_metadata( + self, + response: asset_service.SearchAllIamPoliciesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.SearchAllIamPoliciesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Post-rpc interceptor for search_all_iam_policies Override in a subclass to read or manipulate the response or metadata after it @@ -925,7 +1175,13 @@ def post_search_all_iam_policies_with_metadata(self, response: asset_service.Sea """ return response, metadata - def pre_search_all_resources(self, request: asset_service.SearchAllResourcesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SearchAllResourcesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_search_all_resources( + self, + request: asset_service.SearchAllResourcesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.SearchAllResourcesRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for search_all_resources Override in a subclass to manipulate the request or metadata @@ -933,7 +1189,9 @@ def pre_search_all_resources(self, request: asset_service.SearchAllResourcesRequ """ return request, metadata - def post_search_all_resources(self, response: asset_service.SearchAllResourcesResponse) -> asset_service.SearchAllResourcesResponse: + def post_search_all_resources( + self, response: asset_service.SearchAllResourcesResponse + ) -> asset_service.SearchAllResourcesResponse: """Post-rpc interceptor for 
search_all_resources DEPRECATED. Please use the `post_search_all_resources_with_metadata` @@ -946,7 +1204,14 @@ def post_search_all_resources(self, response: asset_service.SearchAllResourcesRe """ return response - def post_search_all_resources_with_metadata(self, response: asset_service.SearchAllResourcesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SearchAllResourcesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_search_all_resources_with_metadata( + self, + response: asset_service.SearchAllResourcesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.SearchAllResourcesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Post-rpc interceptor for search_all_resources Override in a subclass to read or manipulate the response or metadata after it @@ -961,7 +1226,13 @@ def post_search_all_resources_with_metadata(self, response: asset_service.Search """ return response, metadata - def pre_update_feed(self, request: asset_service.UpdateFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.UpdateFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_update_feed( + self, + request: asset_service.UpdateFeedRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.UpdateFeedRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for update_feed Override in a subclass to manipulate the request or metadata @@ -982,7 +1253,11 @@ def post_update_feed(self, response: asset_service.Feed) -> asset_service.Feed: """ return response - def post_update_feed_with_metadata(self, response: asset_service.Feed, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_update_feed_with_metadata( + self, + response: asset_service.Feed, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.Feed, 
Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for update_feed Override in a subclass to read or manipulate the response or metadata after it @@ -997,7 +1272,13 @@ def post_update_feed_with_metadata(self, response: asset_service.Feed, metadata: """ return response, metadata - def pre_update_saved_query(self, request: asset_service.UpdateSavedQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.UpdateSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_update_saved_query( + self, + request: asset_service.UpdateSavedQueryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.UpdateSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for update_saved_query Override in a subclass to manipulate the request or metadata @@ -1005,7 +1286,9 @@ def pre_update_saved_query(self, request: asset_service.UpdateSavedQueryRequest, """ return request, metadata - def post_update_saved_query(self, response: asset_service.SavedQuery) -> asset_service.SavedQuery: + def post_update_saved_query( + self, response: asset_service.SavedQuery + ) -> asset_service.SavedQuery: """Post-rpc interceptor for update_saved_query DEPRECATED. 
Please use the `post_update_saved_query_with_metadata` @@ -1018,7 +1301,11 @@ def post_update_saved_query(self, response: asset_service.SavedQuery) -> asset_s """ return response - def post_update_saved_query_with_metadata(self, response: asset_service.SavedQuery, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SavedQuery, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_update_saved_query_with_metadata( + self, + response: asset_service.SavedQuery, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.SavedQuery, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for update_saved_query Override in a subclass to read or manipulate the response or metadata after it @@ -1034,8 +1321,12 @@ def post_update_saved_query_with_metadata(self, response: asset_service.SavedQue return response, metadata def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata @@ -1074,62 +1365,63 @@ class AssetServiceRestTransport(_BaseAssetServiceRestTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: 
Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[AssetServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "cloudasset.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[AssetServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via a GitHub issue in - this library's repository. Thank you! - - Args: - host (Optional[str]): - The hostname to connect to (default: 'cloudasset.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. This argument will be - removed in the next major version of this library. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. 
- client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - interceptor (Optional[AssetServiceRestInterceptor]): Interceptor used - to manipulate requests, request metadata, and responses. - api_audience (Optional[str]): The intended audience for the API calls - to the service that will be set when using certain 3rd party - authentication flows. Audience is typically a resource identifier. - If not set, the host value will be used as a default. + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to (default: 'cloudasset.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. 
It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + interceptor (Optional[AssetServiceRestInterceptor]): Interceptor used + to manipulate requests, request metadata, and responses. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the host value will be used as a default. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -1141,10 +1433,11 @@ def __init__(self, *, client_info=client_info, always_use_jwt_access=always_use_jwt_access, url_scheme=url_scheme, - api_audience=api_audience + api_audience=api_audience, ) self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) + self._credentials, default_host=self.DEFAULT_HOST + ) self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) @@ -1161,28 +1454,33 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. 
if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.GetOperation': [ + "google.longrunning.Operations.GetOperation": [ { - 'method': 'get', - 'uri': '/v1/{name=*/*/operations/*/**}', + "method": "get", + "uri": "/v1/{name=*/*/operations/*/**}", }, ], } rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) # Return the client from cache. return self._operations_client - class _AnalyzeIamPolicy(_BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy, AssetServiceRestStub): + class _AnalyzeIamPolicy( + _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.AnalyzeIamPolicy") @@ -1194,26 +1492,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.AnalyzeIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, 
- metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.AnalyzeIamPolicyResponse: + def __call__( + self, + request: asset_service.AnalyzeIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.AnalyzeIamPolicyResponse: r"""Call the analyze iam policy method over HTTP. Args: @@ -1235,30 +1535,42 @@ def __call__(self, """ - http_options = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_http_options() + ) - request, metadata = self._interceptor.pre_analyze_iam_policy(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_analyze_iam_policy( + request, metadata + ) + transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": 
dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeIamPolicy", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeIamPolicy", "httpRequest": http_request, @@ -1267,7 +1579,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._AnalyzeIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._AnalyzeIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1282,20 +1601,26 @@ def __call__(self, resp = self._interceptor.post_analyze_iam_policy(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_analyze_iam_policy_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_analyze_iam_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = asset_service.AnalyzeIamPolicyResponse.to_json(response) + response_payload = asset_service.AnalyzeIamPolicyResponse.to_json( + response + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_iam_policy", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", 
"rpcName": "AnalyzeIamPolicy", "metadata": http_response["headers"], @@ -1304,7 +1629,10 @@ def __call__(self, ) return resp - class _AnalyzeIamPolicyLongrunning(_BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning, AssetServiceRestStub): + class _AnalyzeIamPolicyLongrunning( + _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning, + AssetServiceRestStub, + ): def __hash__(self): return hash("AssetServiceRestTransport.AnalyzeIamPolicyLongrunning") @@ -1316,76 +1644,90 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: asset_service.AnalyzeIamPolicyLongrunningRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: asset_service.AnalyzeIamPolicyLongrunningRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the analyze iam policy - longrunning method over HTTP. - - Args: - request (~.asset_service.AnalyzeIamPolicyLongrunningRequest): - The request object. A request message for - [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + longrunning method over HTTP. + + Args: + request (~.asset_service.AnalyzeIamPolicyLongrunningRequest): + The request object. A request message for + [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_http_options() - request, metadata = self._interceptor.pre_analyze_iam_policy_longrunning(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_analyze_iam_policy_longrunning( + request, metadata + ) + transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_transcoded_request( + http_options, request + ) - body = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_request_body_json(transcoded_request) + body = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeIamPolicyLongrunning", - extra = { + extra={ 
"serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeIamPolicyLongrunning", "httpRequest": http_request, @@ -1394,7 +1736,17 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._AnalyzeIamPolicyLongrunning._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = ( + AssetServiceRestTransport._AnalyzeIamPolicyLongrunning._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1407,20 +1759,26 @@ def __call__(self, resp = self._interceptor.post_analyze_iam_policy_longrunning(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_analyze_iam_policy_longrunning_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = ( + self._interceptor.post_analyze_iam_policy_longrunning_with_metadata( + resp, response_metadata + ) + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_iam_policy_longrunning", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeIamPolicyLongrunning", "metadata": http_response["headers"], @@ -1429,7 +1787,9 @@ def __call__(self, ) return resp - class _AnalyzeMove(_BaseAssetServiceRestTransport._BaseAnalyzeMove, AssetServiceRestStub): + class _AnalyzeMove( + 
_BaseAssetServiceRestTransport._BaseAnalyzeMove, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.AnalyzeMove") @@ -1441,26 +1801,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.AnalyzeMoveRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.AnalyzeMoveResponse: + def __call__( + self, + request: asset_service.AnalyzeMoveRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.AnalyzeMoveResponse: r"""Call the analyze move method over HTTP. 
Args: @@ -1482,30 +1844,44 @@ def __call__(self, """ - http_options = _BaseAssetServiceRestTransport._BaseAnalyzeMove._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseAnalyzeMove._get_http_options() + ) request, metadata = self._interceptor.pre_analyze_move(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeMove._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseAssetServiceRestTransport._BaseAnalyzeMove._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseAnalyzeMove._get_query_params_json(transcoded_request) + query_params = ( + _BaseAssetServiceRestTransport._BaseAnalyzeMove._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeMove", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeMove", "httpRequest": http_request, @@ -1514,7 +1890,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._AnalyzeMove._get_response(self._host, metadata, query_params, self._session, timeout, 
transcoded_request) + response = AssetServiceRestTransport._AnalyzeMove._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1529,20 +1912,26 @@ def __call__(self, resp = self._interceptor.post_analyze_move(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_analyze_move_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_analyze_move_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = asset_service.AnalyzeMoveResponse.to_json(response) + response_payload = asset_service.AnalyzeMoveResponse.to_json( + response + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_move", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeMove", "metadata": http_response["headers"], @@ -1551,7 +1940,9 @@ def __call__(self, ) return resp - class _AnalyzeOrgPolicies(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies, AssetServiceRestStub): + class _AnalyzeOrgPolicies( + _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.AnalyzeOrgPolicies") @@ -1563,26 +1954,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + 
): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.AnalyzeOrgPoliciesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.AnalyzeOrgPoliciesResponse: + def __call__( + self, + request: asset_service.AnalyzeOrgPoliciesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.AnalyzeOrgPoliciesResponse: r"""Call the analyze org policies method over HTTP. Args: @@ -1606,28 +1999,38 @@ def __call__(self, http_options = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_http_options() - request, metadata = self._interceptor.pre_analyze_org_policies(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_analyze_org_policies( + request, metadata + ) + transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = 
_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeOrgPolicies", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeOrgPolicies", "httpRequest": http_request, @@ -1636,7 +2039,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._AnalyzeOrgPolicies._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._AnalyzeOrgPolicies._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1651,20 +2061,26 @@ def __call__(self, resp = self._interceptor.post_analyze_org_policies(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_analyze_org_policies_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_analyze_org_policies_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = asset_service.AnalyzeOrgPoliciesResponse.to_json(response) + response_payload = asset_service.AnalyzeOrgPoliciesResponse.to_json( + response + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_org_policies", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeOrgPolicies", "metadata": http_response["headers"], @@ -1673,7 +2089,10 @@ def __call__(self, ) return resp - class _AnalyzeOrgPolicyGovernedAssets(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets, AssetServiceRestStub): + class _AnalyzeOrgPolicyGovernedAssets( + _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets, + AssetServiceRestStub, + ): def __hash__(self): return hash("AssetServiceRestTransport.AnalyzeOrgPolicyGovernedAssets") @@ -1685,72 +2104,86 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + 
headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: + def __call__( + self, + request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: r"""Call the analyze org policy - governed assets method over HTTP. - - Args: - request (~.asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): - The request object. A request message for - [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: - The response message for - [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets]. + governed assets method over HTTP. + + Args: + request (~.asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): + The request object. 
A request message for + [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: + The response message for + [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets]. """ http_options = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_http_options() - request, metadata = self._interceptor.pre_analyze_org_policy_governed_assets(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_transcoded_request(http_options, request) + request, metadata = ( + self._interceptor.pre_analyze_org_policy_governed_assets( + request, metadata + ) + ) + transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = 
"{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeOrgPolicyGovernedAssets", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeOrgPolicyGovernedAssets", "httpRequest": http_request, @@ -1759,7 +2192,16 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._AnalyzeOrgPolicyGovernedAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = ( + AssetServiceRestTransport._AnalyzeOrgPolicyGovernedAssets._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1774,20 +2216,30 @@ def __call__(self, resp = self._interceptor.post_analyze_org_policy_governed_assets(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_analyze_org_policy_governed_assets_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = ( + self._interceptor.post_analyze_org_policy_governed_assets_with_metadata( + resp, response_metadata + ) + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.to_json(response) + response_payload = ( + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.to_json( + response + ) + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_org_policy_governed_assets", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeOrgPolicyGovernedAssets", "metadata": http_response["headers"], @@ -1796,7 +2248,10 @@ def __call__(self, ) return resp - class _AnalyzeOrgPolicyGovernedContainers(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers, AssetServiceRestStub): + class _AnalyzeOrgPolicyGovernedContainers( + _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers, + AssetServiceRestStub, + ): def __hash__(self): return hash("AssetServiceRestTransport.AnalyzeOrgPolicyGovernedContainers") @@ -1808,72 +2263,86 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + 
method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.AnalyzeOrgPolicyGovernedContainersResponse: + def __call__( + self, + request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.AnalyzeOrgPolicyGovernedContainersResponse: r"""Call the analyze org policy - governed containers method over HTTP. - - Args: - request (~.asset_service.AnalyzeOrgPolicyGovernedContainersRequest): - The request object. A request message for - [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.asset_service.AnalyzeOrgPolicyGovernedContainersResponse: - The response message for - [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers]. + governed containers method over HTTP. 
+ + Args: + request (~.asset_service.AnalyzeOrgPolicyGovernedContainersRequest): + The request object. A request message for + [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.asset_service.AnalyzeOrgPolicyGovernedContainersResponse: + The response message for + [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers]. """ http_options = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_http_options() - request, metadata = self._interceptor.pre_analyze_org_policy_governed_containers(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_transcoded_request(http_options, request) + request, metadata = ( + self._interceptor.pre_analyze_org_policy_governed_containers( + request, metadata + ) + ) + transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = 
_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeOrgPolicyGovernedContainers", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeOrgPolicyGovernedContainers", "httpRequest": http_request, @@ -1882,7 +2351,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._AnalyzeOrgPolicyGovernedContainers._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._AnalyzeOrgPolicyGovernedContainers._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1897,20 +2373,28 @@ def __call__(self, resp = self._interceptor.post_analyze_org_policy_governed_containers(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_analyze_org_policy_governed_containers_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = ( + self._interceptor.post_analyze_org_policy_governed_containers_with_metadata( + resp, response_metadata + ) + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.to_json(response) + response_payload = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.to_json( + response + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_org_policy_governed_containers", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeOrgPolicyGovernedContainers", "metadata": http_response["headers"], @@ -1919,7 +2403,9 @@ def __call__(self, ) return resp - class _BatchGetAssetsHistory(_BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory, AssetServiceRestStub): + class _BatchGetAssetsHistory( + _BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.BatchGetAssetsHistory") @@ -1931,26 +2417,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = 
dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.BatchGetAssetsHistoryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.BatchGetAssetsHistoryResponse: + def __call__( + self, + request: asset_service.BatchGetAssetsHistoryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.BatchGetAssetsHistoryResponse: r"""Call the batch get assets history method over HTTP. Args: @@ -1971,28 +2459,38 @@ def __call__(self, http_options = _BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_http_options() - request, metadata = self._interceptor.pre_batch_get_assets_history(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_batch_get_assets_history( + request, metadata + ) + transcoded_request = _BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_query_params_json( + transcoded_request + ) + + if 
CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.BatchGetAssetsHistory", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "BatchGetAssetsHistory", "httpRequest": http_request, @@ -2001,7 +2499,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._BatchGetAssetsHistory._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._BatchGetAssetsHistory._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2016,20 +2521,26 @@ def __call__(self, resp = self._interceptor.post_batch_get_assets_history(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_batch_get_assets_history_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_batch_get_assets_history_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = asset_service.BatchGetAssetsHistoryResponse.to_json(response) + response_payload = ( + asset_service.BatchGetAssetsHistoryResponse.to_json(response) + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.batch_get_assets_history", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "BatchGetAssetsHistory", "metadata": http_response["headers"], @@ -2038,7 +2549,10 @@ def __call__(self, ) return resp - class _BatchGetEffectiveIamPolicies(_BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies, AssetServiceRestStub): + class _BatchGetEffectiveIamPolicies( + _BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies, + AssetServiceRestStub, + ): def __hash__(self): return hash("AssetServiceRestTransport.BatchGetEffectiveIamPolicies") @@ -2050,72 +2564,84 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' 
+ headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.BatchGetEffectiveIamPoliciesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: + def __call__( + self, + request: asset_service.BatchGetEffectiveIamPoliciesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: r"""Call the batch get effective iam - policies method over HTTP. - - Args: - request (~.asset_service.BatchGetEffectiveIamPoliciesRequest): - The request object. A request message for - [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.asset_service.BatchGetEffectiveIamPoliciesResponse: - A response message for - [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. + policies method over HTTP. + + Args: + request (~.asset_service.BatchGetEffectiveIamPoliciesRequest): + The request object. 
A request message for + [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.asset_service.BatchGetEffectiveIamPoliciesResponse: + A response message for + [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. """ http_options = _BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_http_options() - request, metadata = self._interceptor.pre_batch_get_effective_iam_policies(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_batch_get_effective_iam_policies( + request, metadata + ) + transcoded_request = _BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, 
uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.BatchGetEffectiveIamPolicies", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "BatchGetEffectiveIamPolicies", "httpRequest": http_request, @@ -2124,7 +2650,16 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._BatchGetEffectiveIamPolicies._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = ( + AssetServiceRestTransport._BatchGetEffectiveIamPolicies._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2139,20 +2674,30 @@ def __call__(self, resp = self._interceptor.post_batch_get_effective_iam_policies(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_batch_get_effective_iam_policies_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = ( + self._interceptor.post_batch_get_effective_iam_policies_with_metadata( + resp, response_metadata + ) + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = asset_service.BatchGetEffectiveIamPoliciesResponse.to_json(response) + response_payload = ( + asset_service.BatchGetEffectiveIamPoliciesResponse.to_json( + response + ) + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.batch_get_effective_iam_policies", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "BatchGetEffectiveIamPolicies", "metadata": http_response["headers"], @@ -2161,7 +2706,9 @@ def __call__(self, ) return resp - class _CreateFeed(_BaseAssetServiceRestTransport._BaseCreateFeed, AssetServiceRestStub): + class _CreateFeed( + _BaseAssetServiceRestTransport._BaseCreateFeed, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.CreateFeed") @@ -2173,27 +2720,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = 
"application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: asset_service.CreateFeedRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.Feed: + def __call__( + self, + request: asset_service.CreateFeedRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.Feed: r"""Call the create feed method over HTTP. Args: @@ -2220,32 +2769,50 @@ def __call__(self, """ - http_options = _BaseAssetServiceRestTransport._BaseCreateFeed._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseCreateFeed._get_http_options() + ) request, metadata = self._interceptor.pre_create_feed(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseCreateFeed._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseAssetServiceRestTransport._BaseCreateFeed._get_transcoded_request( + http_options, request + ) + ) - body = _BaseAssetServiceRestTransport._BaseCreateFeed._get_request_body_json(transcoded_request) + body = ( + _BaseAssetServiceRestTransport._BaseCreateFeed._get_request_body_json( + transcoded_request + ) + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseCreateFeed._get_query_params_json(transcoded_request) + query_params = ( + _BaseAssetServiceRestTransport._BaseCreateFeed._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if 
CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.CreateFeed", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "CreateFeed", "httpRequest": http_request, @@ -2254,7 +2821,15 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._CreateFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = AssetServiceRestTransport._CreateFeed._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2269,20 +2844,24 @@ def __call__(self, resp = self._interceptor.post_create_feed(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_feed_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_create_feed_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = asset_service.Feed.to_json(response) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.create_feed", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "CreateFeed", "metadata": http_response["headers"], @@ -2291,7 +2870,9 @@ def __call__(self, ) return resp - class _CreateSavedQuery(_BaseAssetServiceRestTransport._BaseCreateSavedQuery, AssetServiceRestStub): + class _CreateSavedQuery( + _BaseAssetServiceRestTransport._BaseCreateSavedQuery, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.CreateSavedQuery") @@ -2303,27 +2884,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + 
) return response - def __call__(self, - request: asset_service.CreateSavedQueryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.SavedQuery: + def __call__( + self, + request: asset_service.CreateSavedQueryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SavedQuery: r"""Call the create saved query method over HTTP. Args: @@ -2344,32 +2927,46 @@ def __call__(self, """ - http_options = _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_http_options() + ) - request, metadata = self._interceptor.pre_create_saved_query(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_create_saved_query( + request, metadata + ) + transcoded_request = _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_transcoded_request( + http_options, request + ) - body = _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_request_body_json(transcoded_request) + body = _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + 
): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.CreateSavedQuery", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "CreateSavedQuery", "httpRequest": http_request, @@ -2378,7 +2975,15 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._CreateSavedQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = AssetServiceRestTransport._CreateSavedQuery._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2393,20 +2998,24 @@ def __call__(self, resp = self._interceptor.post_create_saved_query(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_saved_query_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_create_saved_query_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = asset_service.SavedQuery.to_json(response) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.create_saved_query", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "CreateSavedQuery", "metadata": http_response["headers"], @@ -2415,7 +3024,9 @@ def __call__(self, ) return resp - class _DeleteFeed(_BaseAssetServiceRestTransport._BaseDeleteFeed, AssetServiceRestStub): + class _DeleteFeed( + _BaseAssetServiceRestTransport._BaseDeleteFeed, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.DeleteFeed") @@ -2427,26 +3038,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + 
) return response - def __call__(self, - request: asset_service.DeleteFeedRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): + def __call__( + self, + request: asset_service.DeleteFeedRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): r"""Call the delete feed method over HTTP. Args: @@ -2461,30 +3074,44 @@ def __call__(self, be of type `bytes`. """ - http_options = _BaseAssetServiceRestTransport._BaseDeleteFeed._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseDeleteFeed._get_http_options() + ) request, metadata = self._interceptor.pre_delete_feed(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseDeleteFeed._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseAssetServiceRestTransport._BaseDeleteFeed._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseDeleteFeed._get_query_params_json(transcoded_request) + query_params = ( + _BaseAssetServiceRestTransport._BaseDeleteFeed._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": 
request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.DeleteFeed", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "DeleteFeed", "httpRequest": http_request, @@ -2493,14 +3120,23 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._DeleteFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._DeleteFeed._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _DeleteSavedQuery(_BaseAssetServiceRestTransport._BaseDeleteSavedQuery, AssetServiceRestStub): + class _DeleteSavedQuery( + _BaseAssetServiceRestTransport._BaseDeleteSavedQuery, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.DeleteSavedQuery") @@ -2512,26 +3148,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.DeleteSavedQueryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): + def __call__( + self, + 
request: asset_service.DeleteSavedQueryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): r"""Call the delete saved query method over HTTP. Args: @@ -2546,30 +3184,42 @@ def __call__(self, be of type `bytes`. """ - http_options = _BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_http_options() + ) - request, metadata = self._interceptor.pre_delete_saved_query(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_delete_saved_query( + request, metadata + ) + transcoded_request = _BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending 
request for google.cloud.asset_v1.AssetServiceClient.DeleteSavedQuery", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "DeleteSavedQuery", "httpRequest": http_request, @@ -2578,14 +3228,23 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._DeleteSavedQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._DeleteSavedQuery._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _ExportAssets(_BaseAssetServiceRestTransport._BaseExportAssets, AssetServiceRestStub): + class _ExportAssets( + _BaseAssetServiceRestTransport._BaseExportAssets, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.ExportAssets") @@ -2597,27 +3256,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: asset_service.ExportAssetsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: asset_service.ExportAssetsRequest, + *, + retry: OptionalRetry = 
gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the export assets method over HTTP. Args: @@ -2639,32 +3300,48 @@ def __call__(self, """ - http_options = _BaseAssetServiceRestTransport._BaseExportAssets._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseExportAssets._get_http_options() + ) request, metadata = self._interceptor.pre_export_assets(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseExportAssets._get_transcoded_request(http_options, request) + transcoded_request = _BaseAssetServiceRestTransport._BaseExportAssets._get_transcoded_request( + http_options, request + ) - body = _BaseAssetServiceRestTransport._BaseExportAssets._get_request_body_json(transcoded_request) + body = ( + _BaseAssetServiceRestTransport._BaseExportAssets._get_request_body_json( + transcoded_request + ) + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseExportAssets._get_query_params_json(transcoded_request) + query_params = ( + _BaseAssetServiceRestTransport._BaseExportAssets._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } 
_LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.ExportAssets", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "ExportAssets", "httpRequest": http_request, @@ -2673,7 +3350,15 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._ExportAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = AssetServiceRestTransport._ExportAssets._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2686,20 +3371,24 @@ def __call__(self, resp = self._interceptor.post_export_assets(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_export_assets_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_export_assets_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.export_assets", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "ExportAssets", "metadata": http_response["headers"], @@ -2720,26 +3409,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = 
transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.GetFeedRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.Feed: + def __call__( + self, + request: asset_service.GetFeedRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.Feed: r"""Call the get feed method over HTTP. Args: @@ -2766,30 +3457,44 @@ def __call__(self, """ - http_options = _BaseAssetServiceRestTransport._BaseGetFeed._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseGetFeed._get_http_options() + ) request, metadata = self._interceptor.pre_get_feed(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseGetFeed._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseAssetServiceRestTransport._BaseGetFeed._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseGetFeed._get_query_params_json(transcoded_request) + query_params = ( + _BaseAssetServiceRestTransport._BaseGetFeed._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + 
request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.GetFeed", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "GetFeed", "httpRequest": http_request, @@ -2798,7 +3503,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._GetFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._GetFeed._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2813,20 +3525,24 @@ def __call__(self, resp = self._interceptor.post_get_feed(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_feed_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_get_feed_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = asset_service.Feed.to_json(response) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.get_feed", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "GetFeed", "metadata": http_response["headers"], @@ -2835,7 +3551,9 @@ def __call__(self, ) return resp - class _GetSavedQuery(_BaseAssetServiceRestTransport._BaseGetSavedQuery, AssetServiceRestStub): + class _GetSavedQuery( + _BaseAssetServiceRestTransport._BaseGetSavedQuery, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.GetSavedQuery") @@ -2847,26 +3565,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - 
request: asset_service.GetSavedQueryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.SavedQuery: + def __call__( + self, + request: asset_service.GetSavedQueryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SavedQuery: r"""Call the get saved query method over HTTP. Args: @@ -2887,30 +3607,40 @@ def __call__(self, """ - http_options = _BaseAssetServiceRestTransport._BaseGetSavedQuery._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseGetSavedQuery._get_http_options() + ) request, metadata = self._interceptor.pre_get_saved_query(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseGetSavedQuery._get_transcoded_request(http_options, request) + transcoded_request = _BaseAssetServiceRestTransport._BaseGetSavedQuery._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseGetSavedQuery._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseAssetServiceRestTransport._BaseGetSavedQuery._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + 
"payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.GetSavedQuery", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "GetSavedQuery", "httpRequest": http_request, @@ -2919,7 +3649,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._GetSavedQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._GetSavedQuery._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2934,20 +3671,24 @@ def __call__(self, resp = self._interceptor.post_get_saved_query(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_saved_query_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_get_saved_query_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = asset_service.SavedQuery.to_json(response) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.get_saved_query", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "GetSavedQuery", "metadata": http_response["headers"], @@ -2956,7 +3697,9 @@ def __call__(self, ) return resp - class 
_ListAssets(_BaseAssetServiceRestTransport._BaseListAssets, AssetServiceRestStub): + class _ListAssets( + _BaseAssetServiceRestTransport._BaseListAssets, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.ListAssets") @@ -2968,26 +3711,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.ListAssetsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.ListAssetsResponse: + def __call__( + self, + request: asset_service.ListAssetsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.ListAssetsResponse: r"""Call the list assets method over HTTP. Args: @@ -3006,30 +3751,44 @@ def __call__(self, ListAssets response. 
""" - http_options = _BaseAssetServiceRestTransport._BaseListAssets._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseListAssets._get_http_options() + ) request, metadata = self._interceptor.pre_list_assets(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseListAssets._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseAssetServiceRestTransport._BaseListAssets._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseListAssets._get_query_params_json(transcoded_request) + query_params = ( + _BaseAssetServiceRestTransport._BaseListAssets._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.ListAssets", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "ListAssets", "httpRequest": http_request, @@ -3038,7 +3797,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._ListAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = 
AssetServiceRestTransport._ListAssets._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -3053,20 +3819,26 @@ def __call__(self, resp = self._interceptor.post_list_assets(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_assets_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_list_assets_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = asset_service.ListAssetsResponse.to_json(response) + response_payload = asset_service.ListAssetsResponse.to_json( + response + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.list_assets", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "ListAssets", "metadata": http_response["headers"], @@ -3075,7 +3847,9 @@ def __call__(self, ) return resp - class _ListFeeds(_BaseAssetServiceRestTransport._BaseListFeeds, AssetServiceRestStub): + class _ListFeeds( + _BaseAssetServiceRestTransport._BaseListFeeds, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.ListFeeds") @@ -3087,26 +3861,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = 
dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.ListFeedsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.ListFeedsResponse: + def __call__( + self, + request: asset_service.ListFeedsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.ListFeedsResponse: r"""Call the list feeds method over HTTP. Args: @@ -3125,30 +3901,44 @@ def __call__(self, """ - http_options = _BaseAssetServiceRestTransport._BaseListFeeds._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseListFeeds._get_http_options() + ) request, metadata = self._interceptor.pre_list_feeds(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseListFeeds._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseAssetServiceRestTransport._BaseListFeeds._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseListFeeds._get_query_params_json(transcoded_request) + query_params = ( + _BaseAssetServiceRestTransport._BaseListFeeds._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = 
"{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.ListFeeds", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "ListFeeds", "httpRequest": http_request, @@ -3157,7 +3947,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._ListFeeds._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._ListFeeds._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -3172,20 +3969,24 @@ def __call__(self, resp = self._interceptor.post_list_feeds(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_feeds_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_list_feeds_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = asset_service.ListFeedsResponse.to_json(response) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.list_feeds", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "ListFeeds", "metadata": http_response["headers"], @@ -3194,7 +3995,9 @@ def __call__(self, ) return resp - class _ListSavedQueries(_BaseAssetServiceRestTransport._BaseListSavedQueries, AssetServiceRestStub): + class _ListSavedQueries( + _BaseAssetServiceRestTransport._BaseListSavedQueries, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.ListSavedQueries") @@ -3206,26 +4009,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) 
return response - def __call__(self, - request: asset_service.ListSavedQueriesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.ListSavedQueriesResponse: + def __call__( + self, + request: asset_service.ListSavedQueriesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.ListSavedQueriesResponse: r"""Call the list saved queries method over HTTP. Args: @@ -3244,30 +4049,42 @@ def __call__(self, Response of listing saved queries. """ - http_options = _BaseAssetServiceRestTransport._BaseListSavedQueries._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseListSavedQueries._get_http_options() + ) - request, metadata = self._interceptor.pre_list_saved_queries(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseListSavedQueries._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_list_saved_queries( + request, metadata + ) + transcoded_request = _BaseAssetServiceRestTransport._BaseListSavedQueries._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseListSavedQueries._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseAssetServiceRestTransport._BaseListSavedQueries._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] 
try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.ListSavedQueries", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "ListSavedQueries", "httpRequest": http_request, @@ -3276,7 +4093,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._ListSavedQueries._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._ListSavedQueries._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -3291,20 +4115,26 @@ def __call__(self, resp = self._interceptor.post_list_saved_queries(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_saved_queries_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_list_saved_queries_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = asset_service.ListSavedQueriesResponse.to_json(response) + response_payload = asset_service.ListSavedQueriesResponse.to_json( + response + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.list_saved_queries", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "ListSavedQueries", "metadata": http_response["headers"], @@ -3313,7 +4143,9 @@ def __call__(self, ) return resp - class _QueryAssets(_BaseAssetServiceRestTransport._BaseQueryAssets, AssetServiceRestStub): + class _QueryAssets( + _BaseAssetServiceRestTransport._BaseQueryAssets, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.QueryAssets") @@ -3325,27 +4157,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), 
timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: asset_service.QueryAssetsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.QueryAssetsResponse: + def __call__( + self, + request: asset_service.QueryAssetsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.QueryAssetsResponse: r"""Call the query assets method over HTTP. Args: @@ -3364,32 +4198,50 @@ def __call__(self, QueryAssets response. """ - http_options = _BaseAssetServiceRestTransport._BaseQueryAssets._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseQueryAssets._get_http_options() + ) request, metadata = self._interceptor.pre_query_assets(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseQueryAssets._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseAssetServiceRestTransport._BaseQueryAssets._get_transcoded_request( + http_options, request + ) + ) - body = _BaseAssetServiceRestTransport._BaseQueryAssets._get_request_body_json(transcoded_request) + body = ( + _BaseAssetServiceRestTransport._BaseQueryAssets._get_request_body_json( + transcoded_request + ) + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseQueryAssets._get_query_params_json(transcoded_request) + query_params = ( + _BaseAssetServiceRestTransport._BaseQueryAssets._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.QueryAssets", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "QueryAssets", "httpRequest": http_request, @@ -3398,7 +4250,15 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._QueryAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = AssetServiceRestTransport._QueryAssets._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -3413,20 +4273,26 @@ def __call__(self, resp = self._interceptor.post_query_assets(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_query_assets_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_query_assets_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = asset_service.QueryAssetsResponse.to_json(response) + response_payload = asset_service.QueryAssetsResponse.to_json( + response + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.query_assets", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "QueryAssets", "metadata": http_response["headers"], @@ -3435,7 +4301,9 @@ def __call__(self, ) return resp - class _SearchAllIamPolicies(_BaseAssetServiceRestTransport._BaseSearchAllIamPolicies, AssetServiceRestStub): + class _SearchAllIamPolicies( + _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.SearchAllIamPolicies") @@ -3447,26 +4315,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, 
uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.SearchAllIamPoliciesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.SearchAllIamPoliciesResponse: + def __call__( + self, + request: asset_service.SearchAllIamPoliciesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SearchAllIamPoliciesResponse: r"""Call the search all iam policies method over HTTP. Args: @@ -3487,28 +4357,38 @@ def __call__(self, http_options = _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_http_options() - request, metadata = self._interceptor.pre_search_all_iam_policies(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_search_all_iam_policies( + request, metadata + ) + transcoded_request = _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = 
transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.SearchAllIamPolicies", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "SearchAllIamPolicies", "httpRequest": http_request, @@ -3517,7 +4397,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._SearchAllIamPolicies._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._SearchAllIamPolicies._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -3532,20 +4419,26 @@ def __call__(self, resp = self._interceptor.post_search_all_iam_policies(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_search_all_iam_policies_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_search_all_iam_policies_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = asset_service.SearchAllIamPoliciesResponse.to_json(response) + response_payload = ( + asset_service.SearchAllIamPoliciesResponse.to_json(response) + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.search_all_iam_policies", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "SearchAllIamPolicies", "metadata": http_response["headers"], @@ -3554,7 +4447,9 @@ def __call__(self, ) return resp - class _SearchAllResources(_BaseAssetServiceRestTransport._BaseSearchAllResources, AssetServiceRestStub): + class _SearchAllResources( + _BaseAssetServiceRestTransport._BaseSearchAllResources, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.SearchAllResources") @@ -3566,26 +4461,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = 
getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.SearchAllResourcesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.SearchAllResourcesResponse: + def __call__( + self, + request: asset_service.SearchAllResourcesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SearchAllResourcesResponse: r"""Call the search all resources method over HTTP. Args: @@ -3606,28 +4503,38 @@ def __call__(self, http_options = _BaseAssetServiceRestTransport._BaseSearchAllResources._get_http_options() - request, metadata = self._interceptor.pre_search_all_resources(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseSearchAllResources._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_search_all_resources( + request, metadata + ) + transcoded_request = _BaseAssetServiceRestTransport._BaseSearchAllResources._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseSearchAllResources._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseAssetServiceRestTransport._BaseSearchAllResources._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, 
uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.SearchAllResources", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "SearchAllResources", "httpRequest": http_request, @@ -3636,7 +4543,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._SearchAllResources._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._SearchAllResources._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -3651,20 +4565,26 @@ def __call__(self, resp = self._interceptor.post_search_all_resources(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_search_all_resources_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_search_all_resources_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = asset_service.SearchAllResourcesResponse.to_json(response) + response_payload = asset_service.SearchAllResourcesResponse.to_json( + response + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.search_all_resources", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "SearchAllResources", "metadata": http_response["headers"], @@ -3673,7 +4593,9 @@ def __call__(self, ) return resp - class _UpdateFeed(_BaseAssetServiceRestTransport._BaseUpdateFeed, AssetServiceRestStub): + class _UpdateFeed( + _BaseAssetServiceRestTransport._BaseUpdateFeed, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.UpdateFeed") @@ -3685,27 +4607,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, 
uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: asset_service.UpdateFeedRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.Feed: + def __call__( + self, + request: asset_service.UpdateFeedRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.Feed: r"""Call the update feed method over HTTP. Args: @@ -3732,32 +4656,50 @@ def __call__(self, """ - http_options = _BaseAssetServiceRestTransport._BaseUpdateFeed._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseUpdateFeed._get_http_options() + ) request, metadata = self._interceptor.pre_update_feed(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseUpdateFeed._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseAssetServiceRestTransport._BaseUpdateFeed._get_transcoded_request( + http_options, request + ) + ) - body = _BaseAssetServiceRestTransport._BaseUpdateFeed._get_request_body_json(transcoded_request) + body = ( + _BaseAssetServiceRestTransport._BaseUpdateFeed._get_request_body_json( + transcoded_request + ) + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseUpdateFeed._get_query_params_json(transcoded_request) + query_params = ( + _BaseAssetServiceRestTransport._BaseUpdateFeed._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + 
request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.UpdateFeed", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "UpdateFeed", "httpRequest": http_request, @@ -3766,7 +4708,15 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._UpdateFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = AssetServiceRestTransport._UpdateFeed._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -3781,20 +4731,24 @@ def __call__(self, resp = self._interceptor.post_update_feed(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_feed_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_update_feed_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = asset_service.Feed.to_json(response) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.update_feed", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "UpdateFeed", "metadata": http_response["headers"], @@ -3803,7 +4757,9 @@ def __call__(self, ) return resp - class _UpdateSavedQuery(_BaseAssetServiceRestTransport._BaseUpdateSavedQuery, AssetServiceRestStub): + class _UpdateSavedQuery( + _BaseAssetServiceRestTransport._BaseUpdateSavedQuery, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.UpdateSavedQuery") @@ -3815,27 +4771,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + 
) return response - def __call__(self, - request: asset_service.UpdateSavedQueryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.SavedQuery: + def __call__( + self, + request: asset_service.UpdateSavedQueryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SavedQuery: r"""Call the update saved query method over HTTP. Args: @@ -3856,32 +4814,46 @@ def __call__(self, """ - http_options = _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_http_options() + ) - request, metadata = self._interceptor.pre_update_saved_query(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_update_saved_query( + request, metadata + ) + transcoded_request = _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_transcoded_request( + http_options, request + ) - body = _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_request_body_json(transcoded_request) + body = _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + 
): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.UpdateSavedQuery", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "UpdateSavedQuery", "httpRequest": http_request, @@ -3890,7 +4862,15 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._UpdateSavedQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = AssetServiceRestTransport._UpdateSavedQuery._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -3905,20 +4885,24 @@ def __call__(self, resp = self._interceptor.post_update_saved_query(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_saved_query_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_update_saved_query_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = asset_service.SavedQuery.to_json(response) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.update_saved_query", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "UpdateSavedQuery", "metadata": http_response["headers"], @@ -3928,194 +4912,233 @@ def __call__(self, return resp @property - def analyze_iam_policy(self) -> Callable[ - [asset_service.AnalyzeIamPolicyRequest], - asset_service.AnalyzeIamPolicyResponse]: + def analyze_iam_policy( + self, + ) -> Callable[ + [asset_service.AnalyzeIamPolicyRequest], asset_service.AnalyzeIamPolicyResponse + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._AnalyzeIamPolicy(self._session, self._host, self._interceptor) # type: ignore + return self._AnalyzeIamPolicy(self._session, self._host, self._interceptor) # type: ignore @property - def analyze_iam_policy_longrunning(self) -> Callable[ - [asset_service.AnalyzeIamPolicyLongrunningRequest], - operations_pb2.Operation]: + def analyze_iam_policy_longrunning( + self, + ) -> Callable[ + [asset_service.AnalyzeIamPolicyLongrunningRequest], operations_pb2.Operation + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._AnalyzeIamPolicyLongrunning(self._session, self._host, self._interceptor) # type: ignore + return self._AnalyzeIamPolicyLongrunning( + self._session, self._host, self._interceptor + ) # type: ignore @property - def analyze_move(self) -> Callable[ - [asset_service.AnalyzeMoveRequest], - asset_service.AnalyzeMoveResponse]: + def analyze_move( + self, + ) -> Callable[ + [asset_service.AnalyzeMoveRequest], asset_service.AnalyzeMoveResponse + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._AnalyzeMove(self._session, self._host, self._interceptor) # type: ignore + return self._AnalyzeMove(self._session, self._host, self._interceptor) # type: ignore @property - def analyze_org_policies(self) -> Callable[ - [asset_service.AnalyzeOrgPoliciesRequest], - asset_service.AnalyzeOrgPoliciesResponse]: + def analyze_org_policies( + self, + ) -> Callable[ + [asset_service.AnalyzeOrgPoliciesRequest], + asset_service.AnalyzeOrgPoliciesResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._AnalyzeOrgPolicies(self._session, self._host, self._interceptor) # type: ignore + return self._AnalyzeOrgPolicies(self._session, self._host, self._interceptor) # type: ignore @property - def analyze_org_policy_governed_assets(self) -> Callable[ - [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]: + def analyze_org_policy_governed_assets( + self, + ) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._AnalyzeOrgPolicyGovernedAssets(self._session, self._host, self._interceptor) # type: ignore + return self._AnalyzeOrgPolicyGovernedAssets( + self._session, self._host, self._interceptor + ) # type: ignore @property - def analyze_org_policy_governed_containers(self) -> Callable[ - [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], - asset_service.AnalyzeOrgPolicyGovernedContainersResponse]: + def analyze_org_policy_governed_containers( + self, + ) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], + asset_service.AnalyzeOrgPolicyGovernedContainersResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._AnalyzeOrgPolicyGovernedContainers(self._session, self._host, self._interceptor) # type: ignore + return self._AnalyzeOrgPolicyGovernedContainers( + self._session, self._host, self._interceptor + ) # type: ignore @property - def batch_get_assets_history(self) -> Callable[ - [asset_service.BatchGetAssetsHistoryRequest], - asset_service.BatchGetAssetsHistoryResponse]: + def batch_get_assets_history( + self, + ) -> Callable[ + [asset_service.BatchGetAssetsHistoryRequest], + asset_service.BatchGetAssetsHistoryResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._BatchGetAssetsHistory(self._session, self._host, self._interceptor) # type: ignore + return self._BatchGetAssetsHistory(self._session, self._host, self._interceptor) # type: ignore @property - def batch_get_effective_iam_policies(self) -> Callable[ - [asset_service.BatchGetEffectiveIamPoliciesRequest], - asset_service.BatchGetEffectiveIamPoliciesResponse]: + def batch_get_effective_iam_policies( + self, + ) -> Callable[ + [asset_service.BatchGetEffectiveIamPoliciesRequest], + asset_service.BatchGetEffectiveIamPoliciesResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._BatchGetEffectiveIamPolicies(self._session, self._host, self._interceptor) # type: ignore + return self._BatchGetEffectiveIamPolicies( + self._session, self._host, self._interceptor + ) # type: ignore @property - def create_feed(self) -> Callable[ - [asset_service.CreateFeedRequest], - asset_service.Feed]: + def create_feed( + self, + ) -> Callable[[asset_service.CreateFeedRequest], asset_service.Feed]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._CreateFeed(self._session, self._host, self._interceptor) # type: ignore + return self._CreateFeed(self._session, self._host, self._interceptor) # type: ignore @property - def create_saved_query(self) -> Callable[ - [asset_service.CreateSavedQueryRequest], - asset_service.SavedQuery]: + def create_saved_query( + self, + ) -> Callable[[asset_service.CreateSavedQueryRequest], asset_service.SavedQuery]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateSavedQuery(self._session, self._host, self._interceptor) # type: ignore + return self._CreateSavedQuery(self._session, self._host, self._interceptor) # type: ignore @property - def delete_feed(self) -> Callable[ - [asset_service.DeleteFeedRequest], - empty_pb2.Empty]: + def delete_feed( + self, + ) -> Callable[[asset_service.DeleteFeedRequest], empty_pb2.Empty]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteFeed(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteFeed(self._session, self._host, self._interceptor) # type: ignore @property - def delete_saved_query(self) -> Callable[ - [asset_service.DeleteSavedQueryRequest], - empty_pb2.Empty]: + def delete_saved_query( + self, + ) -> Callable[[asset_service.DeleteSavedQueryRequest], empty_pb2.Empty]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._DeleteSavedQuery(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteSavedQuery(self._session, self._host, self._interceptor) # type: ignore @property - def export_assets(self) -> Callable[ - [asset_service.ExportAssetsRequest], - operations_pb2.Operation]: + def export_assets( + self, + ) -> Callable[[asset_service.ExportAssetsRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ExportAssets(self._session, self._host, self._interceptor) # type: ignore + return self._ExportAssets(self._session, self._host, self._interceptor) # type: ignore @property - def get_feed(self) -> Callable[ - [asset_service.GetFeedRequest], - asset_service.Feed]: + def get_feed(self) -> Callable[[asset_service.GetFeedRequest], asset_service.Feed]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetFeed(self._session, self._host, self._interceptor) # type: ignore + return self._GetFeed(self._session, self._host, self._interceptor) # type: ignore @property - def get_saved_query(self) -> Callable[ - [asset_service.GetSavedQueryRequest], - asset_service.SavedQuery]: + def get_saved_query( + self, + ) -> Callable[[asset_service.GetSavedQueryRequest], asset_service.SavedQuery]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._GetSavedQuery(self._session, self._host, self._interceptor) # type: ignore + return self._GetSavedQuery(self._session, self._host, self._interceptor) # type: ignore @property - def list_assets(self) -> Callable[ - [asset_service.ListAssetsRequest], - asset_service.ListAssetsResponse]: + def list_assets( + self, + ) -> Callable[[asset_service.ListAssetsRequest], asset_service.ListAssetsResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListAssets(self._session, self._host, self._interceptor) # type: ignore + return self._ListAssets(self._session, self._host, self._interceptor) # type: ignore @property - def list_feeds(self) -> Callable[ - [asset_service.ListFeedsRequest], - asset_service.ListFeedsResponse]: + def list_feeds( + self, + ) -> Callable[[asset_service.ListFeedsRequest], asset_service.ListFeedsResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListFeeds(self._session, self._host, self._interceptor) # type: ignore + return self._ListFeeds(self._session, self._host, self._interceptor) # type: ignore @property - def list_saved_queries(self) -> Callable[ - [asset_service.ListSavedQueriesRequest], - asset_service.ListSavedQueriesResponse]: + def list_saved_queries( + self, + ) -> Callable[ + [asset_service.ListSavedQueriesRequest], asset_service.ListSavedQueriesResponse + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._ListSavedQueries(self._session, self._host, self._interceptor) # type: ignore + return self._ListSavedQueries(self._session, self._host, self._interceptor) # type: ignore @property - def query_assets(self) -> Callable[ - [asset_service.QueryAssetsRequest], - asset_service.QueryAssetsResponse]: + def query_assets( + self, + ) -> Callable[ + [asset_service.QueryAssetsRequest], asset_service.QueryAssetsResponse + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._QueryAssets(self._session, self._host, self._interceptor) # type: ignore + return self._QueryAssets(self._session, self._host, self._interceptor) # type: ignore @property - def search_all_iam_policies(self) -> Callable[ - [asset_service.SearchAllIamPoliciesRequest], - asset_service.SearchAllIamPoliciesResponse]: + def search_all_iam_policies( + self, + ) -> Callable[ + [asset_service.SearchAllIamPoliciesRequest], + asset_service.SearchAllIamPoliciesResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._SearchAllIamPolicies(self._session, self._host, self._interceptor) # type: ignore + return self._SearchAllIamPolicies(self._session, self._host, self._interceptor) # type: ignore @property - def search_all_resources(self) -> Callable[ - [asset_service.SearchAllResourcesRequest], - asset_service.SearchAllResourcesResponse]: + def search_all_resources( + self, + ) -> Callable[ + [asset_service.SearchAllResourcesRequest], + asset_service.SearchAllResourcesResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._SearchAllResources(self._session, self._host, self._interceptor) # type: ignore + return self._SearchAllResources(self._session, self._host, self._interceptor) # type: ignore @property - def update_feed(self) -> Callable[ - [asset_service.UpdateFeedRequest], - asset_service.Feed]: + def update_feed( + self, + ) -> Callable[[asset_service.UpdateFeedRequest], asset_service.Feed]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._UpdateFeed(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateFeed(self._session, self._host, self._interceptor) # type: ignore @property - def update_saved_query(self) -> Callable[ - [asset_service.UpdateSavedQueryRequest], - asset_service.SavedQuery]: + def update_saved_query( + self, + ) -> Callable[[asset_service.UpdateSavedQueryRequest], asset_service.SavedQuery]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._UpdateSavedQuery(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateSavedQuery(self._session, self._host, self._interceptor) # type: ignore @property def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - class _GetOperation(_BaseAssetServiceRestTransport._BaseGetOperation, AssetServiceRestStub): + class _GetOperation( + _BaseAssetServiceRestTransport._BaseGetOperation, AssetServiceRestStub + ): def __hash__(self): return hash("AssetServiceRestTransport.GetOperation") @@ -4127,27 +5150,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. Args: @@ -4165,30 +5189,42 @@ def __call__(self, operations_pb2.Operation: Response from GetOperation method. 
""" - http_options = _BaseAssetServiceRestTransport._BaseGetOperation._get_http_options() + http_options = ( + _BaseAssetServiceRestTransport._BaseGetOperation._get_http_options() + ) request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + transcoded_request = _BaseAssetServiceRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + query_params = ( + _BaseAssetServiceRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.GetOperation", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "GetOperation", "httpRequest": http_request, @@ -4197,7 +5233,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = 
AssetServiceRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -4208,19 +5251,21 @@ def __call__(self, resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceAsyncClient.GetOperation", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "GetOperation", "httpResponse": http_response, @@ -4237,6 +5282,4 @@ def close(self): self._session.close() -__all__=( - 'AssetServiceRestTransport', -) +__all__ = ("AssetServiceRestTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py index b4c1d8818ced..990c272ba9bf 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py @@ -14,19 +14,16 @@ # limitations under the License. 
# import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO - import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - -from google.cloud.asset_v1.types import asset_service import google.protobuf.empty_pb2 as empty_pb2 # type: ignore +from google.api_core import gapic_v1, path_template +from google.cloud.asset_v1.types import asset_service from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format + +from .base import DEFAULT_CLIENT_INFO, AssetServiceTransport class _BaseAssetServiceRestTransport(AssetServiceTransport): @@ -42,14 +39,16 @@ class _BaseAssetServiceRestTransport(AssetServiceTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "cloudasset.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. 
Args: host (Optional[str]): @@ -73,7 +72,9 @@ def __init__(self, *, # Run the base constructor maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER url_match_items = maybe_url_match.groupdict() @@ -84,26 +85,32 @@ def __init__(self, *, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience + api_audience=api_audience, ) class _BaseAnalyzeIamPolicy: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "analysisQuery" : {}, } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "analysisQuery": {}, + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{analysis_query.scope=*/*}:analyzeIamPolicy', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{analysis_query.scope=*/*}:analyzeIamPolicy", + }, ] return http_options @@ -115,11 +122,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + 
query_params.update( + _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_unset_required_fields( + query_params + ) + ) return query_params @@ -127,20 +140,24 @@ class _BaseAnalyzeIamPolicyLongrunning: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{analysis_query.scope=*/*}:analyzeIamPolicyLongrunning', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{analysis_query.scope=*/*}:analyzeIamPolicyLongrunning", + "body": "*", + }, ] return http_options @@ -155,17 +172,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_unset_required_fields( + query_params + ) + ) return query_params @@ -173,19 +196,25 @@ class _BaseAnalyzeMove: def 
__hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "destinationParent" : "", } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "destinationParent": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{resource=*/*}:analyzeMove', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{resource=*/*}:analyzeMove", + }, ] return http_options @@ -197,11 +226,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseAnalyzeMove._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseAnalyzeMove._get_unset_required_fields( + query_params + ) + ) return query_params @@ -209,19 +244,25 @@ class _BaseAnalyzeOrgPolicies: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "constraint" : "", } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "constraint": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in 
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{scope=*/*}:analyzeOrgPolicies', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{scope=*/*}:analyzeOrgPolicies", + }, ] return http_options @@ -233,11 +274,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_unset_required_fields( + query_params + ) + ) return query_params @@ -245,19 +292,25 @@ class _BaseAnalyzeOrgPolicyGovernedAssets: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "constraint" : "", } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "constraint": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{scope=*/*}:analyzeOrgPolicyGovernedAssets', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{scope=*/*}:analyzeOrgPolicyGovernedAssets", + }, ] return http_options @@ -269,11 +322,17 @@ def 
_get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_unset_required_fields( + query_params + ) + ) return query_params @@ -281,35 +340,49 @@ class _BaseAnalyzeOrgPolicyGovernedContainers: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "constraint" : "", } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "constraint": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{scope=*/*}:analyzeOrgPolicyGovernedContainers', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{scope=*/*}:analyzeOrgPolicyGovernedContainers", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): - pb_request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest.pb(request) + pb_request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest.pb( + request + ) transcoded_request = path_template.transcode(http_options, pb_request) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params 
= json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_unset_required_fields( + query_params + ) + ) return query_params @@ -317,19 +390,23 @@ class _BaseBatchGetAssetsHistory: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=*/*}:batchGetAssetsHistory', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=*/*}:batchGetAssetsHistory", + }, ] return http_options @@ -341,11 +418,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + 
_BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_unset_required_fields( + query_params + ) + ) return query_params @@ -353,19 +436,25 @@ class _BaseBatchGetEffectiveIamPolicies: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "names" : "", } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "names": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{scope=*/*}/effectiveIamPolicies:batchGet', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{scope=*/*}/effectiveIamPolicies:batchGet", + }, ] return http_options @@ -377,11 +466,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_unset_required_fields( + query_params + ) + ) return query_params @@ -389,20 +484,24 @@ class _BaseCreateFeed: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def 
_get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=*/*}/feeds', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=*/*}/feeds", + "body": "*", + }, ] return http_options @@ -417,17 +516,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseCreateFeed._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseCreateFeed._get_unset_required_fields( + query_params + ) + ) return query_params @@ -435,20 +540,26 @@ class _BaseCreateSavedQuery: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "savedQueryId" : "", } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "savedQueryId": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def 
_get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=*/*}/savedQueries', - 'body': 'saved_query', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=*/*}/savedQueries", + "body": "saved_query", + }, ] return http_options @@ -463,17 +574,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_unset_required_fields( + query_params + ) + ) return query_params @@ -481,19 +598,23 @@ class _BaseDeleteFeed: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=*/*/feeds/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=*/*/feeds/*}", + }, ] return http_options @@ -505,11 +626,17 @@ 
def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseDeleteFeed._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseDeleteFeed._get_unset_required_fields( + query_params + ) + ) return query_params @@ -517,19 +644,23 @@ class _BaseDeleteSavedQuery: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=*/*/savedQueries/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=*/*/savedQueries/*}", + }, ] return http_options @@ -541,11 +672,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + 
_BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_unset_required_fields( + query_params + ) + ) return query_params @@ -553,20 +690,24 @@ class _BaseExportAssets: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=*/*}:exportAssets', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=*/*}:exportAssets", + "body": "*", + }, ] return http_options @@ -581,17 +722,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseExportAssets._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseExportAssets._get_unset_required_fields( + query_params + ) + ) return query_params @@ -599,19 +746,23 @@ class _BaseGetFeed: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: 
Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=*/*/feeds/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=*/*/feeds/*}", + }, ] return http_options @@ -623,11 +774,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseGetFeed._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseGetFeed._get_unset_required_fields( + query_params + ) + ) return query_params @@ -635,19 +792,23 @@ class _BaseGetSavedQuery: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=*/*/savedQueries/*}', - }, + http_options: List[Dict[str, str]] = [ + { + 
"method": "get", + "uri": "/v1/{name=*/*/savedQueries/*}", + }, ] return http_options @@ -659,11 +820,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseGetSavedQuery._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseGetSavedQuery._get_unset_required_fields( + query_params + ) + ) return query_params @@ -671,19 +838,23 @@ class _BaseListAssets: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=*/*}/assets', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=*/*}/assets", + }, ] return http_options @@ -695,11 +866,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseListAssets._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + 
transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseListAssets._get_unset_required_fields( + query_params + ) + ) return query_params @@ -707,19 +884,23 @@ class _BaseListFeeds: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=*/*}/feeds', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=*/*}/feeds", + }, ] return http_options @@ -731,11 +912,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseListFeeds._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseListFeeds._get_unset_required_fields( + query_params + ) + ) return query_params @@ -743,19 +930,23 @@ class _BaseListSavedQueries: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - 
return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=*/*}/savedQueries', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=*/*}/savedQueries", + }, ] return http_options @@ -767,11 +958,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseListSavedQueries._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseListSavedQueries._get_unset_required_fields( + query_params + ) + ) return query_params @@ -779,20 +976,24 @@ class _BaseQueryAssets: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=*/*}:queryAssets', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=*/*}:queryAssets", + "body": "*", + }, ] return http_options @@ 
-807,17 +1008,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseQueryAssets._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseQueryAssets._get_unset_required_fields( + query_params + ) + ) return query_params @@ -825,19 +1032,23 @@ class _BaseSearchAllIamPolicies: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{scope=*/*}:searchAllIamPolicies', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{scope=*/*}:searchAllIamPolicies", + }, ] return http_options @@ -849,11 +1060,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - 
query_params.update(_BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_unset_required_fields( + query_params + ) + ) return query_params @@ -861,19 +1078,23 @@ class _BaseSearchAllResources: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{scope=*/*}:searchAllResources', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{scope=*/*}:searchAllResources", + }, ] return http_options @@ -885,11 +1106,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseSearchAllResources._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseSearchAllResources._get_unset_required_fields( + query_params + ) + ) return query_params @@ -897,20 +1124,24 @@ class _BaseUpdateFeed: def __hash__(self): # pragma: NO COVER 
return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{feed.name=*/*/feeds/*}', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{feed.name=*/*/feeds/*}", + "body": "*", + }, ] return http_options @@ -925,17 +1156,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseUpdateFeed._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseUpdateFeed._get_unset_required_fields( + query_params + ) + ) return query_params @@ -943,20 +1180,26 @@ class _BaseUpdateSavedQuery: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, 
v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{saved_query.name=*/*/savedQueries/*}', - 'body': 'saved_query', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{saved_query.name=*/*/savedQueries/*}", + "body": "saved_query", + }, ] return http_options @@ -971,17 +1214,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_unset_required_fields( + query_params + ) + ) return query_params @@ -991,26 +1240,24 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=*/*/operations/*/**}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=*/*/operations/*/**}", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = 
path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params -__all__=( - '_BaseAssetServiceRestTransport', -) +__all__ = ("_BaseAssetServiceRestTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py index d8a9b7f9108d..7d558fc697de 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py @@ -34,6 +34,7 @@ BatchGetEffectiveIamPoliciesRequest, BatchGetEffectiveIamPoliciesResponse, BigQueryDestination, + ContentType, CreateFeedRequest, CreateSavedQueryRequest, DeleteFeedRequest, @@ -74,7 +75,6 @@ TableSchema, UpdateFeedRequest, UpdateSavedQueryRequest, - ContentType, ) from .assets import ( Asset, @@ -96,81 +96,81 @@ ) __all__ = ( - 'AnalyzeIamPolicyLongrunningMetadata', - 'AnalyzeIamPolicyLongrunningRequest', - 'AnalyzeIamPolicyLongrunningResponse', - 'AnalyzeIamPolicyRequest', - 'AnalyzeIamPolicyResponse', - 'AnalyzeMoveRequest', - 'AnalyzeMoveResponse', - 'AnalyzeOrgPoliciesRequest', - 'AnalyzeOrgPoliciesResponse', - 'AnalyzeOrgPolicyGovernedAssetsRequest', - 'AnalyzeOrgPolicyGovernedAssetsResponse', - 'AnalyzeOrgPolicyGovernedContainersRequest', - 'AnalyzeOrgPolicyGovernedContainersResponse', - 'AnalyzerOrgPolicy', - 'AnalyzerOrgPolicyConstraint', - 'BatchGetAssetsHistoryRequest', - 'BatchGetAssetsHistoryResponse', - 'BatchGetEffectiveIamPoliciesRequest', - 'BatchGetEffectiveIamPoliciesResponse', - 'BigQueryDestination', - 'CreateFeedRequest', - 'CreateSavedQueryRequest', - 'DeleteFeedRequest', - 
'DeleteSavedQueryRequest', - 'ExportAssetsRequest', - 'ExportAssetsResponse', - 'Feed', - 'FeedOutputConfig', - 'GcsDestination', - 'GcsOutputResult', - 'GetFeedRequest', - 'GetSavedQueryRequest', - 'IamPolicyAnalysisOutputConfig', - 'IamPolicyAnalysisQuery', - 'ListAssetsRequest', - 'ListAssetsResponse', - 'ListFeedsRequest', - 'ListFeedsResponse', - 'ListSavedQueriesRequest', - 'ListSavedQueriesResponse', - 'MoveAnalysis', - 'MoveAnalysisResult', - 'MoveImpact', - 'OutputConfig', - 'OutputResult', - 'PartitionSpec', - 'PubsubDestination', - 'QueryAssetsOutputConfig', - 'QueryAssetsRequest', - 'QueryAssetsResponse', - 'QueryResult', - 'SavedQuery', - 'SearchAllIamPoliciesRequest', - 'SearchAllIamPoliciesResponse', - 'SearchAllResourcesRequest', - 'SearchAllResourcesResponse', - 'TableFieldSchema', - 'TableSchema', - 'UpdateFeedRequest', - 'UpdateSavedQueryRequest', - 'ContentType', - 'Asset', - 'AttachedResource', - 'ConditionEvaluation', - 'IamPolicyAnalysisResult', - 'IamPolicyAnalysisState', - 'IamPolicySearchResult', - 'RelatedAsset', - 'RelatedAssets', - 'RelatedResource', - 'RelatedResources', - 'RelationshipAttributes', - 'Resource', - 'ResourceSearchResult', - 'TemporalAsset', - 'TimeWindow', - 'VersionedResource', + "AnalyzeIamPolicyLongrunningMetadata", + "AnalyzeIamPolicyLongrunningRequest", + "AnalyzeIamPolicyLongrunningResponse", + "AnalyzeIamPolicyRequest", + "AnalyzeIamPolicyResponse", + "AnalyzeMoveRequest", + "AnalyzeMoveResponse", + "AnalyzeOrgPoliciesRequest", + "AnalyzeOrgPoliciesResponse", + "AnalyzeOrgPolicyGovernedAssetsRequest", + "AnalyzeOrgPolicyGovernedAssetsResponse", + "AnalyzeOrgPolicyGovernedContainersRequest", + "AnalyzeOrgPolicyGovernedContainersResponse", + "AnalyzerOrgPolicy", + "AnalyzerOrgPolicyConstraint", + "BatchGetAssetsHistoryRequest", + "BatchGetAssetsHistoryResponse", + "BatchGetEffectiveIamPoliciesRequest", + "BatchGetEffectiveIamPoliciesResponse", + "BigQueryDestination", + "CreateFeedRequest", + 
"CreateSavedQueryRequest", + "DeleteFeedRequest", + "DeleteSavedQueryRequest", + "ExportAssetsRequest", + "ExportAssetsResponse", + "Feed", + "FeedOutputConfig", + "GcsDestination", + "GcsOutputResult", + "GetFeedRequest", + "GetSavedQueryRequest", + "IamPolicyAnalysisOutputConfig", + "IamPolicyAnalysisQuery", + "ListAssetsRequest", + "ListAssetsResponse", + "ListFeedsRequest", + "ListFeedsResponse", + "ListSavedQueriesRequest", + "ListSavedQueriesResponse", + "MoveAnalysis", + "MoveAnalysisResult", + "MoveImpact", + "OutputConfig", + "OutputResult", + "PartitionSpec", + "PubsubDestination", + "QueryAssetsOutputConfig", + "QueryAssetsRequest", + "QueryAssetsResponse", + "QueryResult", + "SavedQuery", + "SearchAllIamPoliciesRequest", + "SearchAllIamPoliciesResponse", + "SearchAllResourcesRequest", + "SearchAllResourcesResponse", + "TableFieldSchema", + "TableSchema", + "UpdateFeedRequest", + "UpdateSavedQueryRequest", + "ContentType", + "Asset", + "AttachedResource", + "ConditionEvaluation", + "IamPolicyAnalysisResult", + "IamPolicyAnalysisState", + "IamPolicySearchResult", + "RelatedAsset", + "RelatedAssets", + "RelatedResource", + "RelatedResources", + "RelationshipAttributes", + "Resource", + "ResourceSearchResult", + "TemporalAsset", + "TimeWindow", + "VersionedResource", ) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py index 371873f8961b..4daa246371c7 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py @@ -17,9 +17,6 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - -from google.cloud.asset_v1.types import assets as gca_assets import google.iam.v1.policy_pb2 as policy_pb2 # type: ignore 
import google.protobuf.duration_pb2 as duration_pb2 # type: ignore import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore @@ -27,72 +24,73 @@ import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore import google.rpc.status_pb2 as status_pb2 # type: ignore import google.type.expr_pb2 as expr_pb2 # type: ignore - +import proto # type: ignore +from google.cloud.asset_v1.types import assets as gca_assets __protobuf__ = proto.module( - package='google.cloud.asset.v1', + package="google.cloud.asset.v1", manifest={ - 'ContentType', - 'AnalyzeIamPolicyLongrunningMetadata', - 'ExportAssetsRequest', - 'ExportAssetsResponse', - 'ListAssetsRequest', - 'ListAssetsResponse', - 'BatchGetAssetsHistoryRequest', - 'BatchGetAssetsHistoryResponse', - 'CreateFeedRequest', - 'GetFeedRequest', - 'ListFeedsRequest', - 'ListFeedsResponse', - 'UpdateFeedRequest', - 'DeleteFeedRequest', - 'OutputConfig', - 'OutputResult', - 'GcsOutputResult', - 'GcsDestination', - 'BigQueryDestination', - 'PartitionSpec', - 'PubsubDestination', - 'FeedOutputConfig', - 'Feed', - 'SearchAllResourcesRequest', - 'SearchAllResourcesResponse', - 'SearchAllIamPoliciesRequest', - 'SearchAllIamPoliciesResponse', - 'IamPolicyAnalysisQuery', - 'AnalyzeIamPolicyRequest', - 'AnalyzeIamPolicyResponse', - 'IamPolicyAnalysisOutputConfig', - 'AnalyzeIamPolicyLongrunningRequest', - 'AnalyzeIamPolicyLongrunningResponse', - 'SavedQuery', - 'CreateSavedQueryRequest', - 'GetSavedQueryRequest', - 'ListSavedQueriesRequest', - 'ListSavedQueriesResponse', - 'UpdateSavedQueryRequest', - 'DeleteSavedQueryRequest', - 'AnalyzeMoveRequest', - 'AnalyzeMoveResponse', - 'MoveAnalysis', - 'MoveAnalysisResult', - 'MoveImpact', - 'QueryAssetsOutputConfig', - 'QueryAssetsRequest', - 'QueryAssetsResponse', - 'QueryResult', - 'TableSchema', - 'TableFieldSchema', - 'BatchGetEffectiveIamPoliciesRequest', - 'BatchGetEffectiveIamPoliciesResponse', - 'AnalyzerOrgPolicy', - 'AnalyzerOrgPolicyConstraint', - 
'AnalyzeOrgPoliciesRequest', - 'AnalyzeOrgPoliciesResponse', - 'AnalyzeOrgPolicyGovernedContainersRequest', - 'AnalyzeOrgPolicyGovernedContainersResponse', - 'AnalyzeOrgPolicyGovernedAssetsRequest', - 'AnalyzeOrgPolicyGovernedAssetsResponse', + "ContentType", + "AnalyzeIamPolicyLongrunningMetadata", + "ExportAssetsRequest", + "ExportAssetsResponse", + "ListAssetsRequest", + "ListAssetsResponse", + "BatchGetAssetsHistoryRequest", + "BatchGetAssetsHistoryResponse", + "CreateFeedRequest", + "GetFeedRequest", + "ListFeedsRequest", + "ListFeedsResponse", + "UpdateFeedRequest", + "DeleteFeedRequest", + "OutputConfig", + "OutputResult", + "GcsOutputResult", + "GcsDestination", + "BigQueryDestination", + "PartitionSpec", + "PubsubDestination", + "FeedOutputConfig", + "Feed", + "SearchAllResourcesRequest", + "SearchAllResourcesResponse", + "SearchAllIamPoliciesRequest", + "SearchAllIamPoliciesResponse", + "IamPolicyAnalysisQuery", + "AnalyzeIamPolicyRequest", + "AnalyzeIamPolicyResponse", + "IamPolicyAnalysisOutputConfig", + "AnalyzeIamPolicyLongrunningRequest", + "AnalyzeIamPolicyLongrunningResponse", + "SavedQuery", + "CreateSavedQueryRequest", + "GetSavedQueryRequest", + "ListSavedQueriesRequest", + "ListSavedQueriesResponse", + "UpdateSavedQueryRequest", + "DeleteSavedQueryRequest", + "AnalyzeMoveRequest", + "AnalyzeMoveResponse", + "MoveAnalysis", + "MoveAnalysisResult", + "MoveImpact", + "QueryAssetsOutputConfig", + "QueryAssetsRequest", + "QueryAssetsResponse", + "QueryResult", + "TableSchema", + "TableFieldSchema", + "BatchGetEffectiveIamPoliciesRequest", + "BatchGetEffectiveIamPoliciesResponse", + "AnalyzerOrgPolicy", + "AnalyzerOrgPolicyConstraint", + "AnalyzeOrgPoliciesRequest", + "AnalyzeOrgPoliciesResponse", + "AnalyzeOrgPolicyGovernedContainersRequest", + "AnalyzeOrgPolicyGovernedContainersResponse", + "AnalyzeOrgPolicyGovernedAssetsRequest", + "AnalyzeOrgPolicyGovernedAssetsResponse", }, ) @@ -117,6 +115,7 @@ class ContentType(proto.Enum): RELATIONSHIP (7): 
The related resources. """ + CONTENT_TYPE_UNSPECIFIED = 0 RESOURCE = 1 IAM_POLICY = 2 @@ -224,15 +223,15 @@ class ExportAssetsRequest(proto.Message): proto.STRING, number=3, ) - content_type: 'ContentType' = proto.Field( + content_type: "ContentType" = proto.Field( proto.ENUM, number=4, - enum='ContentType', + enum="ContentType", ) - output_config: 'OutputConfig' = proto.Field( + output_config: "OutputConfig" = proto.Field( proto.MESSAGE, number=5, - message='OutputConfig', + message="OutputConfig", ) relationship_types: MutableSequence[str] = proto.RepeatedField( proto.STRING, @@ -267,15 +266,15 @@ class ExportAssetsResponse(proto.Message): number=1, message=timestamp_pb2.Timestamp, ) - output_config: 'OutputConfig' = proto.Field( + output_config: "OutputConfig" = proto.Field( proto.MESSAGE, number=2, - message='OutputConfig', + message="OutputConfig", ) - output_result: 'OutputResult' = proto.Field( + output_result: "OutputResult" = proto.Field( proto.MESSAGE, number=3, - message='OutputResult', + message="OutputResult", ) @@ -368,10 +367,10 @@ class ListAssetsRequest(proto.Message): proto.STRING, number=3, ) - content_type: 'ContentType' = proto.Field( + content_type: "ContentType" = proto.Field( proto.ENUM, number=4, - enum='ContentType', + enum="ContentType", ) page_size: int = proto.Field( proto.INT32, @@ -479,10 +478,10 @@ class BatchGetAssetsHistoryRequest(proto.Message): proto.STRING, number=2, ) - content_type: 'ContentType' = proto.Field( + content_type: "ContentType" = proto.Field( proto.ENUM, number=3, - enum='ContentType', + enum="ContentType", ) read_time_window: gca_assets.TimeWindow = proto.Field( proto.MESSAGE, @@ -543,10 +542,10 @@ class CreateFeedRequest(proto.Message): proto.STRING, number=2, ) - feed: 'Feed' = proto.Field( + feed: "Feed" = proto.Field( proto.MESSAGE, number=3, - message='Feed', + message="Feed", ) @@ -594,10 +593,10 @@ class ListFeedsResponse(proto.Message): A list of feeds. 
""" - feeds: MutableSequence['Feed'] = proto.RepeatedField( + feeds: MutableSequence["Feed"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='Feed', + message="Feed", ) @@ -617,10 +616,10 @@ class UpdateFeedRequest(proto.Message): contain fields that are immutable or only set by the server. """ - feed: 'Feed' = proto.Field( + feed: "Feed" = proto.Field( proto.MESSAGE, number=1, - message='Feed', + message="Feed", ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -669,17 +668,17 @@ class OutputConfig(proto.Message): This field is a member of `oneof`_ ``destination``. """ - gcs_destination: 'GcsDestination' = proto.Field( + gcs_destination: "GcsDestination" = proto.Field( proto.MESSAGE, number=1, - oneof='destination', - message='GcsDestination', + oneof="destination", + message="GcsDestination", ) - bigquery_destination: 'BigQueryDestination' = proto.Field( + bigquery_destination: "BigQueryDestination" = proto.Field( proto.MESSAGE, number=2, - oneof='destination', - message='BigQueryDestination', + oneof="destination", + message="BigQueryDestination", ) @@ -695,11 +694,11 @@ class OutputResult(proto.Message): This field is a member of `oneof`_ ``result``. 
""" - gcs_result: 'GcsOutputResult' = proto.Field( + gcs_result: "GcsOutputResult" = proto.Field( proto.MESSAGE, number=1, - oneof='result', - message='GcsOutputResult', + oneof="result", + message="GcsOutputResult", ) @@ -759,12 +758,12 @@ class GcsDestination(proto.Message): uri: str = proto.Field( proto.STRING, number=1, - oneof='object_uri', + oneof="object_uri", ) uri_prefix: str = proto.Field( proto.STRING, number=2, - oneof='object_uri', + oneof="object_uri", ) @@ -864,10 +863,10 @@ class BigQueryDestination(proto.Message): proto.BOOL, number=3, ) - partition_spec: 'PartitionSpec' = proto.Field( + partition_spec: "PartitionSpec" = proto.Field( proto.MESSAGE, number=4, - message='PartitionSpec', + message="PartitionSpec", ) separate_tables_per_asset_type: bool = proto.Field( proto.BOOL, @@ -884,6 +883,7 @@ class PartitionSpec(proto.Message): The partition key for BigQuery partitioned table. """ + class PartitionKey(proto.Enum): r"""This enum is used to determine the partition key column when exporting assets to BigQuery partitioned table(s). Note that, if the @@ -910,6 +910,7 @@ class PartitionKey(proto.Enum): timestamp column representing when the request was received. """ + PARTITION_KEY_UNSPECIFIED = 0 READ_TIME = 1 REQUEST_TIME = 2 @@ -948,11 +949,11 @@ class FeedOutputConfig(proto.Message): This field is a member of `oneof`_ ``destination``. 
""" - pubsub_destination: 'PubsubDestination' = proto.Field( + pubsub_destination: "PubsubDestination" = proto.Field( proto.MESSAGE, number=1, - oneof='destination', - message='PubsubDestination', + oneof="destination", + message="PubsubDestination", ) @@ -1048,15 +1049,15 @@ class Feed(proto.Message): proto.STRING, number=3, ) - content_type: 'ContentType' = proto.Field( + content_type: "ContentType" = proto.Field( proto.ENUM, number=4, - enum='ContentType', + enum="ContentType", ) - feed_output_config: 'FeedOutputConfig' = proto.Field( + feed_output_config: "FeedOutputConfig" = proto.Field( proto.MESSAGE, number=5, - message='FeedOutputConfig', + message="FeedOutputConfig", ) condition: expr_pb2.Expr = proto.Field( proto.MESSAGE, @@ -1726,7 +1727,7 @@ class ConditionContext(proto.Message): access_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=1, - oneof='TimeContext', + oneof="TimeContext", message=timestamp_pb2.Timestamp, ) @@ -1802,10 +1803,10 @@ class AnalyzeIamPolicyRequest(proto.Message): Default is empty. """ - analysis_query: 'IamPolicyAnalysisQuery' = proto.Field( + analysis_query: "IamPolicyAnalysisQuery" = proto.Field( proto.MESSAGE, number=1, - message='IamPolicyAnalysisQuery', + message="IamPolicyAnalysisQuery", ) saved_analysis_query: str = proto.Field( proto.STRING, @@ -1858,24 +1859,28 @@ class IamPolicyAnalysis(proto.Message): the query handling. 
""" - analysis_query: 'IamPolicyAnalysisQuery' = proto.Field( + analysis_query: "IamPolicyAnalysisQuery" = proto.Field( proto.MESSAGE, number=1, - message='IamPolicyAnalysisQuery', + message="IamPolicyAnalysisQuery", ) - analysis_results: MutableSequence[gca_assets.IamPolicyAnalysisResult] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=gca_assets.IamPolicyAnalysisResult, + analysis_results: MutableSequence[gca_assets.IamPolicyAnalysisResult] = ( + proto.RepeatedField( + proto.MESSAGE, + number=2, + message=gca_assets.IamPolicyAnalysisResult, + ) ) fully_explored: bool = proto.Field( proto.BOOL, number=3, ) - non_critical_errors: MutableSequence[gca_assets.IamPolicyAnalysisState] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=gca_assets.IamPolicyAnalysisState, + non_critical_errors: MutableSequence[gca_assets.IamPolicyAnalysisState] = ( + proto.RepeatedField( + proto.MESSAGE, + number=5, + message=gca_assets.IamPolicyAnalysisState, + ) ) main_analysis: IamPolicyAnalysis = proto.Field( @@ -1883,10 +1888,12 @@ class IamPolicyAnalysis(proto.Message): number=1, message=IamPolicyAnalysis, ) - service_account_impersonation_analysis: MutableSequence[IamPolicyAnalysis] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=IamPolicyAnalysis, + service_account_impersonation_analysis: MutableSequence[IamPolicyAnalysis] = ( + proto.RepeatedField( + proto.MESSAGE, + number=2, + message=IamPolicyAnalysis, + ) ) fully_explored: bool = proto.Field( proto.BOOL, @@ -1982,6 +1989,7 @@ class BigQueryDestination(proto.Message): successfully. Details are at https://cloud.google.com/bigquery/docs/loading-data-local#appending_to_or_overwriting_a_table_using_a_local_file. """ + class PartitionKey(proto.Enum): r"""This enum determines the partition key column for the bigquery tables. Partitioning can improve query performance and @@ -2000,6 +2008,7 @@ class PartitionKey(proto.Enum): additional timestamp column representing when the request was received. 
""" + PARTITION_KEY_UNSPECIFIED = 0 REQUEST_TIME = 1 @@ -2011,10 +2020,10 @@ class PartitionKey(proto.Enum): proto.STRING, number=2, ) - partition_key: 'IamPolicyAnalysisOutputConfig.BigQueryDestination.PartitionKey' = proto.Field( + partition_key: "IamPolicyAnalysisOutputConfig.BigQueryDestination.PartitionKey" = proto.Field( proto.ENUM, number=3, - enum='IamPolicyAnalysisOutputConfig.BigQueryDestination.PartitionKey', + enum="IamPolicyAnalysisOutputConfig.BigQueryDestination.PartitionKey", ) write_disposition: str = proto.Field( proto.STRING, @@ -2024,13 +2033,13 @@ class PartitionKey(proto.Enum): gcs_destination: GcsDestination = proto.Field( proto.MESSAGE, number=1, - oneof='destination', + oneof="destination", message=GcsDestination, ) bigquery_destination: BigQueryDestination = proto.Field( proto.MESSAGE, number=2, - oneof='destination', + oneof="destination", message=BigQueryDestination, ) @@ -2066,19 +2075,19 @@ class AnalyzeIamPolicyLongrunningRequest(proto.Message): where the results will be output to. """ - analysis_query: 'IamPolicyAnalysisQuery' = proto.Field( + analysis_query: "IamPolicyAnalysisQuery" = proto.Field( proto.MESSAGE, number=1, - message='IamPolicyAnalysisQuery', + message="IamPolicyAnalysisQuery", ) saved_analysis_query: str = proto.Field( proto.STRING, number=3, ) - output_config: 'IamPolicyAnalysisOutputConfig' = proto.Field( + output_config: "IamPolicyAnalysisOutputConfig" = proto.Field( proto.MESSAGE, number=2, - message='IamPolicyAnalysisOutputConfig', + message="IamPolicyAnalysisOutputConfig", ) @@ -2139,11 +2148,11 @@ class QueryContent(proto.Message): This field is a member of `oneof`_ ``query_content``. 
""" - iam_policy_analysis_query: 'IamPolicyAnalysisQuery' = proto.Field( + iam_policy_analysis_query: "IamPolicyAnalysisQuery" = proto.Field( proto.MESSAGE, number=1, - oneof='query_content', - message='IamPolicyAnalysisQuery', + oneof="query_content", + message="IamPolicyAnalysisQuery", ) name: str = proto.Field( @@ -2216,10 +2225,10 @@ class CreateSavedQueryRequest(proto.Message): proto.STRING, number=1, ) - saved_query: 'SavedQuery' = proto.Field( + saved_query: "SavedQuery" = proto.Field( proto.MESSAGE, number=2, - message='SavedQuery', + message="SavedQuery", ) saved_query_id: str = proto.Field( proto.STRING, @@ -2317,10 +2326,10 @@ class ListSavedQueriesResponse(proto.Message): def raw_page(self): return self - saved_queries: MutableSequence['SavedQuery'] = proto.RepeatedField( + saved_queries: MutableSequence["SavedQuery"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='SavedQuery', + message="SavedQuery", ) next_page_token: str = proto.Field( proto.STRING, @@ -2345,10 +2354,10 @@ class UpdateSavedQueryRequest(proto.Message): Required. The list of fields to update. """ - saved_query: 'SavedQuery' = proto.Field( + saved_query: "SavedQuery" = proto.Field( proto.MESSAGE, number=1, - message='SavedQuery', + message="SavedQuery", ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -2400,6 +2409,7 @@ class AnalyzeMoveRequest(proto.Message): should be included in the analysis response. If unspecified, the default view is FULL. """ + class AnalysisView(proto.Enum): r"""View enum for supporting partial analysis responses. @@ -2415,6 +2425,7 @@ class AnalysisView(proto.Enum): will prevent the specified resource move at runtime. """ + ANALYSIS_VIEW_UNSPECIFIED = 0 FULL = 1 BASIC = 2 @@ -2445,10 +2456,10 @@ class AnalyzeMoveResponse(proto.Message): services. 
""" - move_analysis: MutableSequence['MoveAnalysis'] = proto.RepeatedField( + move_analysis: MutableSequence["MoveAnalysis"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='MoveAnalysis', + message="MoveAnalysis", ) @@ -2482,16 +2493,16 @@ class MoveAnalysis(proto.Message): proto.STRING, number=1, ) - analysis: 'MoveAnalysisResult' = proto.Field( + analysis: "MoveAnalysisResult" = proto.Field( proto.MESSAGE, number=2, - oneof='result', - message='MoveAnalysisResult', + oneof="result", + message="MoveAnalysisResult", ) error: status_pb2.Status = proto.Field( proto.MESSAGE, number=3, - oneof='result', + oneof="result", message=status_pb2.Status, ) @@ -2512,15 +2523,15 @@ class MoveAnalysisResult(proto.Message): but will not block moves at runtime. """ - blockers: MutableSequence['MoveImpact'] = proto.RepeatedField( + blockers: MutableSequence["MoveImpact"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='MoveImpact', + message="MoveImpact", ) - warnings: MutableSequence['MoveImpact'] = proto.RepeatedField( + warnings: MutableSequence["MoveImpact"] = proto.RepeatedField( proto.MESSAGE, number=2, - message='MoveImpact', + message="MoveImpact", ) @@ -2688,12 +2699,12 @@ class QueryAssetsRequest(proto.Message): statement: str = proto.Field( proto.STRING, number=2, - oneof='query', + oneof="query", ) job_reference: str = proto.Field( proto.STRING, number=3, - oneof='query', + oneof="query", ) page_size: int = proto.Field( proto.INT32, @@ -2711,19 +2722,19 @@ class QueryAssetsRequest(proto.Message): read_time_window: gca_assets.TimeWindow = proto.Field( proto.MESSAGE, number=7, - oneof='time', + oneof="time", message=gca_assets.TimeWindow, ) read_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=8, - oneof='time', + oneof="time", message=timestamp_pb2.Timestamp, ) - output_config: 'QueryAssetsOutputConfig' = proto.Field( + output_config: "QueryAssetsOutputConfig" = proto.Field( proto.MESSAGE, number=9, - 
message='QueryAssetsOutputConfig', + message="QueryAssetsOutputConfig", ) @@ -2776,20 +2787,20 @@ class QueryAssetsResponse(proto.Message): error: status_pb2.Status = proto.Field( proto.MESSAGE, number=3, - oneof='response', + oneof="response", message=status_pb2.Status, ) - query_result: 'QueryResult' = proto.Field( + query_result: "QueryResult" = proto.Field( proto.MESSAGE, number=4, - oneof='response', - message='QueryResult', + oneof="response", + message="QueryResult", ) - output_config: 'QueryAssetsOutputConfig' = proto.Field( + output_config: "QueryAssetsOutputConfig" = proto.Field( proto.MESSAGE, number=5, - oneof='response', - message='QueryAssetsOutputConfig', + oneof="response", + message="QueryAssetsOutputConfig", ) @@ -2821,10 +2832,10 @@ def raw_page(self): number=1, message=struct_pb2.Struct, ) - schema: 'TableSchema' = proto.Field( + schema: "TableSchema" = proto.Field( proto.MESSAGE, number=2, - message='TableSchema', + message="TableSchema", ) next_page_token: str = proto.Field( proto.STRING, @@ -2844,10 +2855,10 @@ class TableSchema(proto.Message): Describes the fields in a table. 
""" - fields: MutableSequence['TableFieldSchema'] = proto.RepeatedField( + fields: MutableSequence["TableFieldSchema"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='TableFieldSchema', + message="TableFieldSchema", ) @@ -2898,10 +2909,10 @@ class TableFieldSchema(proto.Message): proto.STRING, number=3, ) - fields: MutableSequence['TableFieldSchema'] = proto.RepeatedField( + fields: MutableSequence["TableFieldSchema"] = proto.RepeatedField( proto.MESSAGE, number=4, - message='TableFieldSchema', + message="TableFieldSchema", ) @@ -3020,10 +3031,12 @@ class PolicyInfo(proto.Message): proto.STRING, number=1, ) - policies: MutableSequence['BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo'] = proto.RepeatedField( + policies: MutableSequence[ + "BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo" + ] = proto.RepeatedField( proto.MESSAGE, number=2, - message='BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo', + message="BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo", ) policy_results: MutableSequence[EffectiveIamPolicy] = proto.RepeatedField( @@ -3131,26 +3144,26 @@ class StringValues(proto.Message): number=2, ) - values: 'AnalyzerOrgPolicy.Rule.StringValues' = proto.Field( + values: "AnalyzerOrgPolicy.Rule.StringValues" = proto.Field( proto.MESSAGE, number=3, - oneof='kind', - message='AnalyzerOrgPolicy.Rule.StringValues', + oneof="kind", + message="AnalyzerOrgPolicy.Rule.StringValues", ) allow_all: bool = proto.Field( proto.BOOL, number=4, - oneof='kind', + oneof="kind", ) deny_all: bool = proto.Field( proto.BOOL, number=5, - oneof='kind', + oneof="kind", ) enforce: bool = proto.Field( proto.BOOL, number=6, - oneof='kind', + oneof="kind", ) condition: expr_pb2.Expr = proto.Field( proto.MESSAGE, @@ -3241,6 +3254,7 @@ class Constraint(proto.Message): This field is a member of `oneof`_ ``constraint_type``. 
""" + class ConstraintDefault(proto.Enum): r"""Specifies the default behavior in the absence of any ``Policy`` for the ``Constraint``. This must not be @@ -3259,6 +3273,7 @@ class ConstraintDefault(proto.Enum): constraints. Indicate that enforcement is on for boolean constraints. """ + CONSTRAINT_DEFAULT_UNSPECIFIED = 0 ALLOW = 1 DENY = 2 @@ -3313,22 +3328,24 @@ class BooleanConstraint(proto.Message): proto.STRING, number=3, ) - constraint_default: 'AnalyzerOrgPolicyConstraint.Constraint.ConstraintDefault' = proto.Field( + constraint_default: "AnalyzerOrgPolicyConstraint.Constraint.ConstraintDefault" = proto.Field( proto.ENUM, number=4, - enum='AnalyzerOrgPolicyConstraint.Constraint.ConstraintDefault', + enum="AnalyzerOrgPolicyConstraint.Constraint.ConstraintDefault", ) - list_constraint: 'AnalyzerOrgPolicyConstraint.Constraint.ListConstraint' = proto.Field( - proto.MESSAGE, - number=5, - oneof='constraint_type', - message='AnalyzerOrgPolicyConstraint.Constraint.ListConstraint', + list_constraint: "AnalyzerOrgPolicyConstraint.Constraint.ListConstraint" = ( + proto.Field( + proto.MESSAGE, + number=5, + oneof="constraint_type", + message="AnalyzerOrgPolicyConstraint.Constraint.ListConstraint", + ) ) - boolean_constraint: 'AnalyzerOrgPolicyConstraint.Constraint.BooleanConstraint' = proto.Field( + boolean_constraint: "AnalyzerOrgPolicyConstraint.Constraint.BooleanConstraint" = proto.Field( proto.MESSAGE, number=6, - oneof='constraint_type', - message='AnalyzerOrgPolicyConstraint.Constraint.BooleanConstraint', + oneof="constraint_type", + message="AnalyzerOrgPolicyConstraint.Constraint.BooleanConstraint", ) class CustomConstraint(proto.Message): @@ -3363,6 +3380,7 @@ class CustomConstraint(proto.Message): Detailed information about this custom policy constraint. """ + class MethodType(proto.Enum): r"""The operation in which this constraint will be applied. 
For example: If the constraint applies only when create VMs, the method_types @@ -3382,6 +3400,7 @@ class MethodType(proto.Enum): Constraint applied when deleting the resource. """ + METHOD_TYPE_UNSPECIFIED = 0 CREATE = 1 UPDATE = 2 @@ -3398,6 +3417,7 @@ class ActionType(proto.Enum): DENY (2): Deny action type. """ + ACTION_TYPE_UNSPECIFIED = 0 ALLOW = 1 DENY = 2 @@ -3410,19 +3430,23 @@ class ActionType(proto.Enum): proto.STRING, number=2, ) - method_types: MutableSequence['AnalyzerOrgPolicyConstraint.CustomConstraint.MethodType'] = proto.RepeatedField( + method_types: MutableSequence[ + "AnalyzerOrgPolicyConstraint.CustomConstraint.MethodType" + ] = proto.RepeatedField( proto.ENUM, number=3, - enum='AnalyzerOrgPolicyConstraint.CustomConstraint.MethodType', + enum="AnalyzerOrgPolicyConstraint.CustomConstraint.MethodType", ) condition: str = proto.Field( proto.STRING, number=4, ) - action_type: 'AnalyzerOrgPolicyConstraint.CustomConstraint.ActionType' = proto.Field( - proto.ENUM, - number=5, - enum='AnalyzerOrgPolicyConstraint.CustomConstraint.ActionType', + action_type: "AnalyzerOrgPolicyConstraint.CustomConstraint.ActionType" = ( + proto.Field( + proto.ENUM, + number=5, + enum="AnalyzerOrgPolicyConstraint.CustomConstraint.ActionType", + ) ) display_name: str = proto.Field( proto.STRING, @@ -3436,13 +3460,13 @@ class ActionType(proto.Enum): google_defined_constraint: Constraint = proto.Field( proto.MESSAGE, number=1, - oneof='constraint_definition', + oneof="constraint_definition", message=Constraint, ) custom_constraint: CustomConstraint = proto.Field( proto.MESSAGE, number=2, - oneof='constraint_definition', + oneof="constraint_definition", message=CustomConstraint, ) @@ -3549,15 +3573,15 @@ class OrgPolicyResult(proto.Message): also appear in the list. 
""" - consolidated_policy: 'AnalyzerOrgPolicy' = proto.Field( + consolidated_policy: "AnalyzerOrgPolicy" = proto.Field( proto.MESSAGE, number=1, - message='AnalyzerOrgPolicy', + message="AnalyzerOrgPolicy", ) - policy_bundle: MutableSequence['AnalyzerOrgPolicy'] = proto.RepeatedField( + policy_bundle: MutableSequence["AnalyzerOrgPolicy"] = proto.RepeatedField( proto.MESSAGE, number=2, - message='AnalyzerOrgPolicy', + message="AnalyzerOrgPolicy", ) @property @@ -3569,10 +3593,10 @@ def raw_page(self): number=1, message=OrgPolicyResult, ) - constraint: 'AnalyzerOrgPolicyConstraint' = proto.Field( + constraint: "AnalyzerOrgPolicyConstraint" = proto.Field( proto.MESSAGE, number=2, - message='AnalyzerOrgPolicyConstraint', + message="AnalyzerOrgPolicyConstraint", ) next_page_token: str = proto.Field( proto.STRING, @@ -3699,15 +3723,15 @@ class GovernedContainer(proto.Message): proto.STRING, number=2, ) - consolidated_policy: 'AnalyzerOrgPolicy' = proto.Field( + consolidated_policy: "AnalyzerOrgPolicy" = proto.Field( proto.MESSAGE, number=3, - message='AnalyzerOrgPolicy', + message="AnalyzerOrgPolicy", ) - policy_bundle: MutableSequence['AnalyzerOrgPolicy'] = proto.RepeatedField( + policy_bundle: MutableSequence["AnalyzerOrgPolicy"] = proto.RepeatedField( proto.MESSAGE, number=4, - message='AnalyzerOrgPolicy', + message="AnalyzerOrgPolicy", ) @property @@ -3719,10 +3743,10 @@ def raw_page(self): number=1, message=GovernedContainer, ) - constraint: 'AnalyzerOrgPolicyConstraint' = proto.Field( + constraint: "AnalyzerOrgPolicyConstraint" = proto.Field( proto.MESSAGE, number=2, - message='AnalyzerOrgPolicyConstraint', + message="AnalyzerOrgPolicyConstraint", ) next_page_token: str = proto.Field( proto.STRING, @@ -3963,27 +3987,29 @@ class GovernedAsset(proto.Message): also appear in the list. 
""" - governed_resource: 'AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource' = proto.Field( - proto.MESSAGE, - number=1, - oneof='governed_asset', - message='AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource', + governed_resource: "AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource" = ( + proto.Field( + proto.MESSAGE, + number=1, + oneof="governed_asset", + message="AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource", + ) ) - governed_iam_policy: 'AnalyzeOrgPolicyGovernedAssetsResponse.GovernedIamPolicy' = proto.Field( + governed_iam_policy: "AnalyzeOrgPolicyGovernedAssetsResponse.GovernedIamPolicy" = proto.Field( proto.MESSAGE, number=2, - oneof='governed_asset', - message='AnalyzeOrgPolicyGovernedAssetsResponse.GovernedIamPolicy', + oneof="governed_asset", + message="AnalyzeOrgPolicyGovernedAssetsResponse.GovernedIamPolicy", ) - consolidated_policy: 'AnalyzerOrgPolicy' = proto.Field( + consolidated_policy: "AnalyzerOrgPolicy" = proto.Field( proto.MESSAGE, number=3, - message='AnalyzerOrgPolicy', + message="AnalyzerOrgPolicy", ) - policy_bundle: MutableSequence['AnalyzerOrgPolicy'] = proto.RepeatedField( + policy_bundle: MutableSequence["AnalyzerOrgPolicy"] = proto.RepeatedField( proto.MESSAGE, number=4, - message='AnalyzerOrgPolicy', + message="AnalyzerOrgPolicy", ) @property @@ -3995,10 +4021,10 @@ def raw_page(self): number=1, message=GovernedAsset, ) - constraint: 'AnalyzerOrgPolicyConstraint' = proto.Field( + constraint: "AnalyzerOrgPolicyConstraint" = proto.Field( proto.MESSAGE, number=2, - message='AnalyzerOrgPolicyConstraint', + message="AnalyzerOrgPolicyConstraint", ) next_page_token: str = proto.Field( proto.STRING, diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py index e3e94d6271a2..d7762146452f 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py @@ -17,8 +17,6 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - import google.cloud.orgpolicy.v1.orgpolicy_pb2 as orgpolicy_pb2 # type: ignore import google.cloud.osconfig.v1.inventory_pb2 as inventory_pb2 # type: ignore import google.iam.v1.policy_pb2 as policy_pb2 # type: ignore @@ -28,27 +26,27 @@ import google.protobuf.struct_pb2 as struct_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore import google.rpc.code_pb2 as code_pb2 # type: ignore - +import proto # type: ignore __protobuf__ = proto.module( - package='google.cloud.asset.v1', + package="google.cloud.asset.v1", manifest={ - 'TemporalAsset', - 'TimeWindow', - 'Asset', - 'Resource', - 'RelatedAssets', - 'RelationshipAttributes', - 'RelatedAsset', - 'ResourceSearchResult', - 'VersionedResource', - 'AttachedResource', - 'RelatedResources', - 'RelatedResource', - 'IamPolicySearchResult', - 'IamPolicyAnalysisState', - 'ConditionEvaluation', - 'IamPolicyAnalysisResult', + "TemporalAsset", + "TimeWindow", + "Asset", + "Resource", + "RelatedAssets", + "RelationshipAttributes", + "RelatedAsset", + "ResourceSearchResult", + "VersionedResource", + "AttachedResource", + "RelatedResources", + "RelatedResource", + "IamPolicySearchResult", + "IamPolicyAnalysisState", + "ConditionEvaluation", + "IamPolicyAnalysisResult", }, ) @@ -73,6 +71,7 @@ class TemporalAsset(proto.Message): PRESENT. Currently this is only set for responses in Real-Time Feed. """ + class PriorAssetState(proto.Enum): r"""State of prior asset. @@ -88,35 +87,36 @@ class PriorAssetState(proto.Enum): DELETED (4): prior_asset is a deletion. 
""" + PRIOR_ASSET_STATE_UNSPECIFIED = 0 PRESENT = 1 INVALID = 2 DOES_NOT_EXIST = 3 DELETED = 4 - window: 'TimeWindow' = proto.Field( + window: "TimeWindow" = proto.Field( proto.MESSAGE, number=1, - message='TimeWindow', + message="TimeWindow", ) deleted: bool = proto.Field( proto.BOOL, number=2, ) - asset: 'Asset' = proto.Field( + asset: "Asset" = proto.Field( proto.MESSAGE, number=3, - message='Asset', + message="Asset", ) prior_asset_state: PriorAssetState = proto.Field( proto.ENUM, number=4, enum=PriorAssetState, ) - prior_asset: 'Asset' = proto.Field( + prior_asset: "Asset" = proto.Field( proto.MESSAGE, number=5, - message='Asset', + message="Asset", ) @@ -253,10 +253,10 @@ class Asset(proto.Message): proto.STRING, number=2, ) - resource: 'Resource' = proto.Field( + resource: "Resource" = proto.Field( proto.MESSAGE, number=3, - message='Resource', + message="Resource", ) iam_policy: policy_pb2.Policy = proto.Field( proto.MESSAGE, @@ -271,19 +271,19 @@ class Asset(proto.Message): access_policy: access_policy_pb2.AccessPolicy = proto.Field( proto.MESSAGE, number=7, - oneof='access_context_policy', + oneof="access_context_policy", message=access_policy_pb2.AccessPolicy, ) access_level: access_level_pb2.AccessLevel = proto.Field( proto.MESSAGE, number=8, - oneof='access_context_policy', + oneof="access_context_policy", message=access_level_pb2.AccessLevel, ) service_perimeter: service_perimeter_pb2.ServicePerimeter = proto.Field( proto.MESSAGE, number=9, - oneof='access_context_policy', + oneof="access_context_policy", message=service_perimeter_pb2.ServicePerimeter, ) os_inventory: inventory_pb2.Inventory = proto.Field( @@ -291,15 +291,15 @@ class Asset(proto.Message): number=12, message=inventory_pb2.Inventory, ) - related_assets: 'RelatedAssets' = proto.Field( + related_assets: "RelatedAssets" = proto.Field( proto.MESSAGE, number=13, - message='RelatedAssets', + message="RelatedAssets", ) - related_asset: 'RelatedAsset' = proto.Field( + related_asset: 
"RelatedAsset" = proto.Field( proto.MESSAGE, number=15, - message='RelatedAsset', + message="RelatedAsset", ) ancestors: MutableSequence[str] = proto.RepeatedField( proto.STRING, @@ -400,15 +400,15 @@ class RelatedAssets(proto.Message): The peer resources of the relationship. """ - relationship_attributes: 'RelationshipAttributes' = proto.Field( + relationship_attributes: "RelationshipAttributes" = proto.Field( proto.MESSAGE, number=1, - message='RelationshipAttributes', + message="RelationshipAttributes", ) - assets: MutableSequence['RelatedAsset'] = proto.RepeatedField( + assets: MutableSequence["RelatedAsset"] = proto.RepeatedField( proto.MESSAGE, number=2, - message='RelatedAsset', + message="RelatedAsset", ) @@ -888,21 +888,21 @@ class ResourceSearchResult(proto.Message): proto.STRING, number=19, ) - versioned_resources: MutableSequence['VersionedResource'] = proto.RepeatedField( + versioned_resources: MutableSequence["VersionedResource"] = proto.RepeatedField( proto.MESSAGE, number=16, - message='VersionedResource', + message="VersionedResource", ) - attached_resources: MutableSequence['AttachedResource'] = proto.RepeatedField( + attached_resources: MutableSequence["AttachedResource"] = proto.RepeatedField( proto.MESSAGE, number=20, - message='AttachedResource', + message="AttachedResource", ) - relationships: MutableMapping[str, 'RelatedResources'] = proto.MapField( + relationships: MutableMapping[str, "RelatedResources"] = proto.MapField( proto.STRING, proto.MESSAGE, number=21, - message='RelatedResources', + message="RelatedResources", ) tag_keys: MutableSequence[str] = proto.RepeatedField( proto.STRING, @@ -985,10 +985,10 @@ class AttachedResource(proto.Message): proto.STRING, number=1, ) - versioned_resources: MutableSequence['VersionedResource'] = proto.RepeatedField( + versioned_resources: MutableSequence["VersionedResource"] = proto.RepeatedField( proto.MESSAGE, number=3, - message='VersionedResource', + message="VersionedResource", ) @@ -1001,10 
+1001,10 @@ class RelatedResources(proto.Message): resource. """ - related_resources: MutableSequence['RelatedResource'] = proto.RepeatedField( + related_resources: MutableSequence["RelatedResource"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='RelatedResource', + message="RelatedResource", ) @@ -1142,11 +1142,13 @@ class Permissions(proto.Message): number=1, ) - matched_permissions: MutableMapping[str, 'IamPolicySearchResult.Explanation.Permissions'] = proto.MapField( + matched_permissions: MutableMapping[ + str, "IamPolicySearchResult.Explanation.Permissions" + ] = proto.MapField( proto.STRING, proto.MESSAGE, number=1, - message='IamPolicySearchResult.Explanation.Permissions', + message="IamPolicySearchResult.Explanation.Permissions", ) resource: str = proto.Field( @@ -1219,6 +1221,7 @@ class ConditionEvaluation(proto.Message): evaluation_value (google.cloud.asset_v1.types.ConditionEvaluation.EvaluationValue): The evaluation result. """ + class EvaluationValue(proto.Enum): r"""Value of this expression. @@ -1234,6 +1237,7 @@ class EvaluationValue(proto.Enum): expression contains variables that are either missing input values or have not been supported by Analyzer yet. 
""" + EVALUATION_VALUE_UNSPECIFIED = 0 TRUE = 1 FALSE = 2 @@ -1290,10 +1294,10 @@ class Resource(proto.Message): proto.STRING, number=1, ) - analysis_state: 'IamPolicyAnalysisState' = proto.Field( + analysis_state: "IamPolicyAnalysisState" = proto.Field( proto.MESSAGE, number=2, - message='IamPolicyAnalysisState', + message="IamPolicyAnalysisState", ) class Access(proto.Message): @@ -1322,17 +1326,17 @@ class Access(proto.Message): role: str = proto.Field( proto.STRING, number=1, - oneof='oneof_access', + oneof="oneof_access", ) permission: str = proto.Field( proto.STRING, number=2, - oneof='oneof_access', + oneof="oneof_access", ) - analysis_state: 'IamPolicyAnalysisState' = proto.Field( + analysis_state: "IamPolicyAnalysisState" = proto.Field( proto.MESSAGE, number=3, - message='IamPolicyAnalysisState', + message="IamPolicyAnalysisState", ) class Identity(proto.Message): @@ -1360,10 +1364,10 @@ class Identity(proto.Message): proto.STRING, number=1, ) - analysis_state: 'IamPolicyAnalysisState' = proto.Field( + analysis_state: "IamPolicyAnalysisState" = proto.Field( proto.MESSAGE, number=2, - message='IamPolicyAnalysisState', + message="IamPolicyAnalysisState", ) class Edge(proto.Message): @@ -1437,25 +1441,31 @@ class AccessControlList(proto.Message): defined in the above IAM policy binding. 
""" - resources: MutableSequence['IamPolicyAnalysisResult.Resource'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='IamPolicyAnalysisResult.Resource', + resources: MutableSequence["IamPolicyAnalysisResult.Resource"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message="IamPolicyAnalysisResult.Resource", + ) ) - accesses: MutableSequence['IamPolicyAnalysisResult.Access'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='IamPolicyAnalysisResult.Access', + accesses: MutableSequence["IamPolicyAnalysisResult.Access"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=2, + message="IamPolicyAnalysisResult.Access", + ) ) - resource_edges: MutableSequence['IamPolicyAnalysisResult.Edge'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='IamPolicyAnalysisResult.Edge', + resource_edges: MutableSequence["IamPolicyAnalysisResult.Edge"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=3, + message="IamPolicyAnalysisResult.Edge", + ) ) - condition_evaluation: 'ConditionEvaluation' = proto.Field( + condition_evaluation: "ConditionEvaluation" = proto.Field( proto.MESSAGE, number=4, - message='ConditionEvaluation', + message="ConditionEvaluation", ) class IdentityList(proto.Message): @@ -1483,15 +1493,19 @@ class IdentityList(proto.Message): enabled in request. 
""" - identities: MutableSequence['IamPolicyAnalysisResult.Identity'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='IamPolicyAnalysisResult.Identity', + identities: MutableSequence["IamPolicyAnalysisResult.Identity"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message="IamPolicyAnalysisResult.Identity", + ) ) - group_edges: MutableSequence['IamPolicyAnalysisResult.Edge'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='IamPolicyAnalysisResult.Edge', + group_edges: MutableSequence["IamPolicyAnalysisResult.Edge"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=2, + message="IamPolicyAnalysisResult.Edge", + ) ) attached_resource_full_name: str = proto.Field( diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index ca9b5afb08f6..6f668d988685 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -17,9 +17,8 @@ import pathlib import re import shutil - -from typing import Dict, List import warnings +from typing import Dict, List import nox @@ -154,7 +153,8 @@ def lint(session): # 2. Check formatting session.run( - "ruff", "format", + "ruff", + "format", "--check", f"--target-version=py{ALL_PYTHON[0].replace('.', '')}", "--line-length=88", @@ -167,12 +167,15 @@ def lint(session): @nox.session(python=DEFAULT_PYTHON_VERSION) def blacken(session): """(Deprecated) Legacy session. Please use 'nox -s format'.""" - session.log("WARNING: The 'blacken' session is deprecated and will be removed in a future release. Please use 'nox -s format' in the future.") + session.log( + "WARNING: The 'blacken' session is deprecated and will be removed in a future release. Please use 'nox -s format' in the future." 
+ ) # Just run the ruff formatter (keeping legacy behavior of only formatting, not sorting imports) session.install(RUFF_VERSION) session.run( - "ruff", "format", + "ruff", + "format", f"--target-version=py{ALL_PYTHON[0].replace('.', '')}", "--line-length=88", *LINT_PATHS, @@ -191,8 +194,10 @@ def format(session): # check --select I: Enables strict import sorting # --fix: Applies the changes automatically session.run( - "ruff", "check", - "--select", "I", + "ruff", + "check", + "--select", + "I", "--fix", f"--target-version=py{ALL_PYTHON[0].replace('.', '')}", "--line-length=88", # Standard Black line length @@ -201,7 +206,8 @@ def format(session): # 3. Run Ruff to format code session.run( - "ruff", "format", + "ruff", + "format", f"--target-version=py{ALL_PYTHON[0].replace('.', '')}", "--line-length=88", # Standard Black line length *LINT_PATHS, @@ -386,8 +392,10 @@ def docs(session): "sphinx-build", "-T", # show full traceback on exception "-N", # no colors - "-b", "html", # builder - "-d", os.path.join("docs", "_build", "doctrees", ""), # cache directory + "-b", + "html", # builder + "-d", + os.path.join("docs", "_build", "doctrees", ""), # cache directory # paths to build: os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py index 972a32690352..0366d7e4fbef 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py @@ -52,4 +52,5 @@ async def sample_analyze_iam_policy(): # Handle the 
response print(response) + # [END cloudasset_v1_generated_AssetService_AnalyzeIamPolicy_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py index ced3d6211229..ecfffce1bd80 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py @@ -60,4 +60,5 @@ async def sample_analyze_iam_policy_longrunning(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_AnalyzeIamPolicyLongrunning_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py index 603d16e9c0fe..f11eb0404534 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py @@ -60,4 +60,5 @@ def sample_analyze_iam_policy_longrunning(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_AnalyzeIamPolicyLongrunning_sync] diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py index 2e623980824a..f7dda0c2a1d3 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py @@ -52,4 +52,5 @@ def sample_analyze_iam_policy(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_AnalyzeIamPolicy_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_async.py index 1a09c5464486..dbaf7a43f356 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_async.py @@ -50,4 +50,5 @@ async def sample_analyze_move(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_AnalyzeMove_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_sync.py index e1c4d693611b..9b99769d1508 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_sync.py @@ -50,4 +50,5 @@ def sample_analyze_move(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_AnalyzeMove_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_async.py index b7c1b35df750..1051e32e3eeb 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_async.py @@ -51,4 +51,5 @@ async def sample_analyze_org_policies(): async for response in page_result: print(response) + # [END cloudasset_v1_generated_AssetService_AnalyzeOrgPolicies_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py index 315543e7d5a3..b738f76fd51d 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py @@ -51,4 +51,5 @@ def 
sample_analyze_org_policies(): for response in page_result: print(response) + # [END cloudasset_v1_generated_AssetService_AnalyzeOrgPolicies_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py index 8ed6cc039b28..3b2eac7cad30 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py @@ -51,4 +51,5 @@ async def sample_analyze_org_policy_governed_assets(): async for response in page_result: print(response) + # [END cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedAssets_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py index a5e3393f985d..e6c0a8b405f9 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py @@ -51,4 +51,5 @@ def sample_analyze_org_policy_governed_assets(): for response in page_result: print(response) + # [END cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedAssets_sync] 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py index 215869302790..248661ce34ab 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py @@ -51,4 +51,5 @@ async def sample_analyze_org_policy_governed_containers(): async for response in page_result: print(response) + # [END cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedContainers_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py index 4f52cbdb913c..0f351b016261 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py @@ -51,4 +51,5 @@ def sample_analyze_org_policy_governed_containers(): for response in page_result: print(response) + # [END cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedContainers_sync] diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py index 185ec5b55028..2764fd7a51da 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py @@ -49,4 +49,5 @@ async def sample_batch_get_assets_history(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_BatchGetAssetsHistory_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py index a4dbf5d89897..f02b9bdbf9d9 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py @@ -49,4 +49,5 @@ def sample_batch_get_assets_history(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_BatchGetAssetsHistory_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py 
b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py index 2a2112e96bb8..d1bf79824eec 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py @@ -41,7 +41,7 @@ async def sample_batch_get_effective_iam_policies(): # Initialize request argument(s) request = asset_v1.BatchGetEffectiveIamPoliciesRequest( scope="scope_value", - names=['names_value1', 'names_value2'], + names=["names_value1", "names_value2"], ) # Make the request @@ -50,4 +50,5 @@ async def sample_batch_get_effective_iam_policies(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_BatchGetEffectiveIamPolicies_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py index 03874bb7410f..56409aeefead 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py @@ -41,7 +41,7 @@ def sample_batch_get_effective_iam_policies(): # Initialize request argument(s) request = asset_v1.BatchGetEffectiveIamPoliciesRequest( scope="scope_value", - names=['names_value1', 'names_value2'], + names=["names_value1", "names_value2"], ) # 
Make the request @@ -50,4 +50,5 @@ def sample_batch_get_effective_iam_policies(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_BatchGetEffectiveIamPolicies_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py index 0bcd5fb1e550..c57f865ef6b8 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py @@ -54,4 +54,5 @@ async def sample_create_feed(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_CreateFeed_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py index 2f8e11296274..bbc4716203c3 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py @@ -54,4 +54,5 @@ def sample_create_feed(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_CreateFeed_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_async.py 
b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_async.py index a8b64d40ecac..cab2f1d1e9e0 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_async.py @@ -50,4 +50,5 @@ async def sample_create_saved_query(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_CreateSavedQuery_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_sync.py index 53cb726f86a8..c56a08171a98 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_sync.py @@ -50,4 +50,5 @@ def sample_create_saved_query(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_CreateSavedQuery_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py index aad81a353e0d..850133239bc2 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py +++ 
b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py @@ -57,4 +57,5 @@ async def sample_export_assets(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_ExportAssets_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py index 696b353c3757..fbb690ae9c2f 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py @@ -57,4 +57,5 @@ def sample_export_assets(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_ExportAssets_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py index 884b7d0034f5..78cc0df1b656 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py @@ -49,4 +49,5 @@ async def sample_get_feed(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_GetFeed_async] diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py index 712a533b7155..fea5f827077e 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py @@ -49,4 +49,5 @@ def sample_get_feed(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_GetFeed_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_async.py index a24420bb5fc7..9bfa1bf2ccb3 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_async.py @@ -49,4 +49,5 @@ async def sample_get_saved_query(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_GetSavedQuery_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_sync.py index 291b88589c9b..b2ffea25208a 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_sync.py @@ -49,4 +49,5 @@ def sample_get_saved_query(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_GetSavedQuery_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py index 6afb977deb07..a3375bb53809 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py @@ -50,4 +50,5 @@ async def sample_list_assets(): async for response in page_result: print(response) + # [END cloudasset_v1_generated_AssetService_ListAssets_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py index f7fc8801a1e9..0062409e4c7f 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py @@ -50,4 +50,5 @@ def sample_list_assets(): for response in page_result: print(response) + # [END 
cloudasset_v1_generated_AssetService_ListAssets_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py index 64284321eb2d..5b641d54a7b0 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py @@ -49,4 +49,5 @@ async def sample_list_feeds(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_ListFeeds_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py index b9733d98cdb2..0de1d2dade3d 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py @@ -49,4 +49,5 @@ def sample_list_feeds(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_ListFeeds_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_async.py index 3d5f2095cb55..4ced4cc6d866 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_async.py @@ -50,4 +50,5 @@ async def sample_list_saved_queries(): async for response in page_result: print(response) + # [END cloudasset_v1_generated_AssetService_ListSavedQueries_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_sync.py index 9c096d9e60ef..300205d16ec4 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_sync.py @@ -50,4 +50,5 @@ def sample_list_saved_queries(): for response in page_result: print(response) + # [END cloudasset_v1_generated_AssetService_ListSavedQueries_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_async.py index baaade80cdee..3fce0d2ec17b 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_async.py @@ -50,4 +50,5 @@ async def sample_query_assets(): # 
Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_QueryAssets_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_sync.py index a52f6818bd64..948136fa52b4 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_sync.py @@ -50,4 +50,5 @@ def sample_query_assets(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_QueryAssets_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py index a62518e66584..6f0bc290b715 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py @@ -50,4 +50,5 @@ async def sample_search_all_iam_policies(): async for response in page_result: print(response) + # [END cloudasset_v1_generated_AssetService_SearchAllIamPolicies_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py 
b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py index 71536d46c51d..e41d2480ef06 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py @@ -50,4 +50,5 @@ def sample_search_all_iam_policies(): for response in page_result: print(response) + # [END cloudasset_v1_generated_AssetService_SearchAllIamPolicies_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py index aa480039bd46..adcfe0e98512 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py @@ -50,4 +50,5 @@ async def sample_search_all_resources(): async for response in page_result: print(response) + # [END cloudasset_v1_generated_AssetService_SearchAllResources_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py index 26ea1bffc470..71db7ae3bbeb 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py @@ -50,4 +50,5 @@ def sample_search_all_resources(): for response in page_result: print(response) + # [END cloudasset_v1_generated_AssetService_SearchAllResources_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py index 23cc1a6f0d91..b04f10e9e06a 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py @@ -52,4 +52,5 @@ async def sample_update_feed(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_UpdateFeed_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py index 36745fa8c988..48c02911cd1c 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py @@ -52,4 +52,5 @@ def sample_update_feed(): # Handle the response print(response) + # [END 
cloudasset_v1_generated_AssetService_UpdateFeed_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_async.py index 269098a2574a..4ac0c5b1f9eb 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_async.py @@ -39,8 +39,7 @@ async def sample_update_saved_query(): client = asset_v1.AssetServiceAsyncClient() # Initialize request argument(s) - request = asset_v1.UpdateSavedQueryRequest( - ) + request = asset_v1.UpdateSavedQueryRequest() # Make the request response = await client.update_saved_query(request=request) @@ -48,4 +47,5 @@ async def sample_update_saved_query(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_UpdateSavedQuery_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_sync.py index fb8864825e7e..5e95a9d87623 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_sync.py @@ -39,8 +39,7 @@ def sample_update_saved_query(): client = asset_v1.AssetServiceClient() # Initialize request argument(s) - request = 
asset_v1.UpdateSavedQueryRequest( - ) + request = asset_v1.UpdateSavedQueryRequest() # Make the request response = client.update_saved_query(request=request) @@ -48,4 +47,5 @@ def sample_update_saved_query(): # Handle the response print(response) + # [END cloudasset_v1_generated_AssetService_UpdateSavedQuery_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 44bb98121411..85ac691eeb23 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -17,20 +17,20 @@ import os import re -import setuptools # type: ignore +import setuptools # type: ignore package_root = os.path.abspath(os.path.dirname(__file__)) -name = 'google-cloud-asset' +name = "google-cloud-asset" description = "Google Cloud Asset API client library" version = None -with open(os.path.join(package_root, 'google/cloud/asset/gapic_version.py')) as fp: +with open(os.path.join(package_root, "google/cloud/asset/gapic_version.py")) as fp: version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) + assert len(version_candidates) == 1 version = version_candidates[0] if version[0] == "0": @@ -52,8 +52,7 @@ "google-cloud-os-config >= 1.13.0, <2.0.0", "grpc-google-iam-v1 >= 0.14.0, <1.0.0", ] -extras = { -} +extras = {} url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-asset" package_root = os.path.abspath(os.path.dirname(__file__)) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/__init__.py index 191773d5572d..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: 
utf-8 -*- # Copyright 2025 Google LLC # diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/__init__.py index 191773d5572d..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/__init__.py index 191773d5572d..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py index 191773d5572d..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 36f92b53433b..df028d216029 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -14,6 
+14,7 @@ # limitations under the License. # import os + # try/except added for compatibility with python < 3.8 try: from unittest import mock @@ -21,48 +22,27 @@ except ImportError: # pragma: NO COVER import mock -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format import json import math +from collections.abc import AsyncIterable, Iterable, Mapping, Sequence + +import grpc import pytest -from collections.abc import Sequence, Mapping from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule +from google.protobuf import json_format +from grpc.experimental import aio from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from requests import PreparedRequest, Request, Response from requests.sessions import Session -from google.protobuf import json_format try: from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER +except ImportError: # pragma: NO COVER HAS_GOOGLE_AUTH_AIO = False -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.asset_v1.services.asset_service import AssetServiceAsyncClient -from google.cloud.asset_v1.services.asset_service import AssetServiceClient -from 
google.cloud.asset_v1.services.asset_service import pagers -from google.cloud.asset_v1.services.asset_service import transports -from google.cloud.asset_v1.types import asset_service -from google.cloud.asset_v1.types import assets -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account import google.api_core.operation_async as operation_async # type: ignore import google.auth import google.protobuf.duration_pb2 as duration_pb2 # type: ignore @@ -70,8 +50,29 @@ import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore import google.rpc.status_pb2 as status_pb2 # type: ignore import google.type.expr_pb2 as expr_pb2 # type: ignore - - +from google.api_core import ( + client_options, + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.asset_v1.services.asset_service import ( + AssetServiceAsyncClient, + AssetServiceClient, + pagers, + transports, +) +from google.cloud.asset_v1.types import asset_service, assets +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account CRED_INFO_JSON = { "credential_source": "/path/to/file", @@ -86,9 +87,11 @@ async def mock_async_gen(data, chunk_size=1): chunk = data[i : i + chunk_size] yield chunk.encode("utf-8") + def client_cert_source_callback(): return b"cert bytes", b"key bytes" + # TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. # See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. 
def async_anonymous_credentials(): @@ -96,17 +99,27 @@ def async_anonymous_credentials(): return ga_credentials_async.AnonymousCredentials() return ga_credentials.AnonymousCredentials() + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + # If default endpoint template is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint template so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) def test__get_default_mtls_endpoint(): @@ -118,12 +131,27 @@ def test__get_default_mtls_endpoint(): custom_endpoint = ".custom" assert AssetServiceClient._get_default_mtls_endpoint(None) is None - assert AssetServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert AssetServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert AssetServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert AssetServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert ( + AssetServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + AssetServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + 
AssetServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + AssetServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) assert AssetServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - assert AssetServiceClient._get_default_mtls_endpoint(custom_endpoint) == custom_endpoint + assert ( + AssetServiceClient._get_default_mtls_endpoint(custom_endpoint) + == custom_endpoint + ) + def test__read_environment_variables(): assert AssetServiceClient._read_environment_variables() == (False, "auto", None) @@ -146,16 +174,24 @@ def test__read_environment_variables(): ) else: assert AssetServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert AssetServiceClient._read_environment_variables() == ( False, - "auto", + "never", None, ) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert AssetServiceClient._read_environment_variables() == (False, "never", None) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert AssetServiceClient._read_environment_variables() == (False, "always", None) + assert AssetServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): assert AssetServiceClient._read_environment_variables() == (False, "auto", None) @@ -163,10 +199,17 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: AssetServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be 
`never`, `auto` or `always`" + ) with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert AssetServiceClient._read_environment_variables() == (False, "auto", "foo.com") + assert AssetServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) def test_use_client_cert_effective(): @@ -175,7 +218,9 @@ def test_use_client_cert_effective(): # the google-auth library supports automatic mTLS and determines that a # client certificate should be used. if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=True): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=True + ): assert AssetServiceClient._use_client_cert_effective() is True # Test case 2: Test when `should_use_client_cert` returns False. @@ -183,7 +228,9 @@ def test_use_client_cert_effective(): # the google-auth library supports automatic mTLS and determines that a # client certificate should NOT be used. if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=False): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=False + ): assert AssetServiceClient._use_client_cert_effective() is False # Test case 3: Test when `should_use_client_cert` is unavailable and the @@ -195,7 +242,9 @@ def test_use_client_cert_effective(): # Test case 4: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". 
if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"} + ): assert AssetServiceClient._use_client_cert_effective() is False # Test case 5: Test when `should_use_client_cert` is unavailable and the @@ -207,7 +256,9 @@ def test_use_client_cert_effective(): # Test case 6: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"} + ): assert AssetServiceClient._use_client_cert_effective() is False # Test case 7: Test when `should_use_client_cert` is unavailable and the @@ -219,7 +270,9 @@ def test_use_client_cert_effective(): # Test case 8: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"} + ): assert AssetServiceClient._use_client_cert_effective() is False # Test case 9: Test when `should_use_client_cert` is unavailable and the @@ -234,83 +287,167 @@ def test_use_client_cert_effective(): # The method should raise a ValueError as the environment variable must be either # "true" or "false". 
if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): with pytest.raises(ValueError): AssetServiceClient._use_client_cert_effective() # Test case 11: Test when `should_use_client_cert` is available and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. # The method should return False as the environment variable is set to an invalid value. - if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): assert AssetServiceClient._use_client_cert_effective() is False # Test case 12: Test when `should_use_client_cert` is available and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also, # the GOOGLE_API_CONFIG environment variable is unset. 
- if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): assert AssetServiceClient._use_client_cert_effective() is False + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() assert AssetServiceClient._get_client_cert_source(None, False) is None - assert AssetServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert AssetServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + assert ( + AssetServiceClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + AssetServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + AssetServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + AssetServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert AssetServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert AssetServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source -@mock.patch.object(AssetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceClient)) 
-@mock.patch.object(AssetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceAsyncClient)) +@mock.patch.object( + AssetServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AssetServiceClient), +) +@mock.patch.object( + AssetServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AssetServiceAsyncClient), +) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() default_universe = AssetServiceClient._DEFAULT_UNIVERSE - default_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) - assert AssetServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert AssetServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == AssetServiceClient.DEFAULT_MTLS_ENDPOINT - assert AssetServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert AssetServiceClient._get_api_endpoint(None, None, default_universe, "always") == AssetServiceClient.DEFAULT_MTLS_ENDPOINT - assert AssetServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == AssetServiceClient.DEFAULT_MTLS_ENDPOINT - assert AssetServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert AssetServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + assert ( + AssetServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, 
"always" + ) + == api_override + ) + assert ( + AssetServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == AssetServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + AssetServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + AssetServiceClient._get_api_endpoint(None, None, default_universe, "always") + == AssetServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + AssetServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == AssetServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + AssetServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + AssetServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) with pytest.raises(MutualTLSChannelError) as excinfo: - AssetServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + AssetServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) def test__get_universe_domain(): client_universe_domain = "foo.com" universe_domain_env = "bar.com" - assert AssetServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert AssetServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert AssetServiceClient._get_universe_domain(None, None) == AssetServiceClient._DEFAULT_UNIVERSE + assert ( + AssetServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + AssetServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + AssetServiceClient._get_universe_domain(None, None) + == AssetServiceClient._DEFAULT_UNIVERSE + ) with pytest.raises(ValueError) as excinfo: AssetServiceClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): cred = mock.Mock(["get_cred_info"]) cred.get_cred_info = mock.Mock(return_value=cred_info_json) @@ -326,7 +463,8 @@ def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_in else: assert error.details == ["foo"] -@pytest.mark.parametrize("error_code", [401,403,404,500]) + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) def 
test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): cred = mock.Mock([]) assert not hasattr(cred, "get_cred_info") @@ -339,14 +477,20 @@ def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): client._add_cred_info_for_auth_errors(error) assert error.details == [] -@pytest.mark.parametrize("client_class,transport_name", [ - (AssetServiceClient, "grpc"), - (AssetServiceAsyncClient, "grpc_asyncio"), - (AssetServiceClient, "rest"), -]) + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AssetServiceClient, "grpc"), + (AssetServiceAsyncClient, "grpc_asyncio"), + (AssetServiceClient, "rest"), + ], +) def test_asset_service_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: factory.return_value = creds info = {"valid": True} client = client_class.from_service_account_info(info, transport=transport_name) @@ -354,52 +498,68 @@ def test_asset_service_client_from_service_account_info(client_class, transport_ assert isinstance(client, client_class) assert client.transport._host == ( - 'cloudasset.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://cloudasset.googleapis.com' + "cloudasset.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudasset.googleapis.com" ) -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.AssetServiceGrpcTransport, "grpc"), - (transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.AssetServiceRestTransport, "rest"), -]) -def test_asset_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: 
+@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.AssetServiceGrpcTransport, "grpc"), + (transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.AssetServiceRestTransport, "rest"), + ], +) +def test_asset_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=False) use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class,transport_name", [ - (AssetServiceClient, "grpc"), - (AssetServiceAsyncClient, "grpc_asyncio"), - (AssetServiceClient, "rest"), -]) +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AssetServiceClient, "grpc"), + (AssetServiceAsyncClient, "grpc_asyncio"), + (AssetServiceClient, "rest"), + ], +) def test_asset_service_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, 
client_class) - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) assert client.transport._host == ( - 'cloudasset.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://cloudasset.googleapis.com' + "cloudasset.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudasset.googleapis.com" ) @@ -415,30 +575,45 @@ def test_asset_service_client_get_transport_class(): assert transport == transports.AssetServiceGrpcTransport -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest"), -]) -@mock.patch.object(AssetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceAsyncClient)) -def test_asset_service_client_client_options(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), + ( + AssetServiceAsyncClient, + transports.AssetServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (AssetServiceClient, transports.AssetServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + AssetServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AssetServiceClient), +) +@mock.patch.object( + AssetServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AssetServiceAsyncClient), +) +def 
test_asset_service_client_client_options( + client_class, transport_class, transport_name +): # Check that if channel is provided we won't create a new one. - with mock.patch.object(AssetServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) + with mock.patch.object(AssetServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. - with mock.patch.object(AssetServiceClient, 'get_transport_class') as gtc: + with mock.patch.object(AssetServiceClient, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() # Check the case api_endpoint is provided. options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( @@ -456,13 +631,15 @@ def test_asset_service_client_client_options(client_class, transport_class, tran # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -474,7 +651,7 @@ def test_asset_service_client_client_options(client_class, transport_class, tran # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( @@ -494,17 +671,22 @@ def test_asset_service_client_client_options(client_class, transport_class, tran with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, 
transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -513,48 +695,82 @@ def test_asset_service_client_client_options(client_class, transport_class, tran api_audience=None, ) # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", "true"), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", "false"), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest", "true"), - (AssetServiceClient, transports.AssetServiceRestTransport, 
"rest", "false"), -]) -@mock.patch.object(AssetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceAsyncClient)) + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", "true"), + ( + AssetServiceAsyncClient, + transports.AssetServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", "false"), + ( + AssetServiceAsyncClient, + transports.AssetServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (AssetServiceClient, transports.AssetServiceRestTransport, "rest", "true"), + (AssetServiceClient, transports.AssetServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + AssetServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AssetServiceClient), +) +@mock.patch.object( + AssetServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AssetServiceAsyncClient), +) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_asset_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): +def test_asset_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. # Check the case client_cert_source is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -573,12 +789,22 @@ def test_asset_service_client_mtls_env_auto(client_class, transport_class, trans # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -599,15 +825,22 @@ def test_asset_service_client_mtls_env_auto(client_class, transport_class, trans ) # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -617,19 +850,27 @@ def test_asset_service_client_mtls_env_auto(client_class, transport_class, trans ) -@pytest.mark.parametrize("client_class", [ - AssetServiceClient, AssetServiceAsyncClient -]) -@mock.patch.object(AssetServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceAsyncClient)) +@pytest.mark.parametrize("client_class", [AssetServiceClient, AssetServiceAsyncClient]) +@mock.patch.object( + AssetServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceClient) +) +@mock.patch.object( + AssetServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AssetServiceAsyncClient), +) def test_asset_service_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source == mock_client_cert_source @@ -637,18 +878,25 @@ def test_asset_service_client_get_mtls_endpoint_and_cert_source(client_class): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source is None # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): if hasattr(google.auth.transport.mtls, "should_use_client_cert"): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, ) api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( options @@ -685,23 +933,23 @@ def test_asset_service_client_get_mtls_endpoint_and_cert_source(client_class): env = os.environ.copy() env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) with mock.patch.dict(os.environ, env, clear=True): - config_filename = "mock_certificate_config.json" - config_file_content = json.dumps(config_data) - m = mock.mock_open(read_data=config_file_content) - with mock.patch("builtins.open", m): - with mock.patch.dict( - os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} - ): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, - api_endpoint=mock_api_endpoint, - ) - api_endpoint, cert_source = ( - client_class.get_mtls_endpoint_and_cert_source(options) - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is expected_cert_source + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint 
== mock_api_endpoint + assert cert_source is expected_cert_source # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). test_cases = [ @@ -732,23 +980,23 @@ def test_asset_service_client_get_mtls_endpoint_and_cert_source(client_class): env = os.environ.copy() env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") with mock.patch.dict(os.environ, env, clear=True): - config_filename = "mock_certificate_config.json" - config_file_content = json.dumps(config_data) - m = mock.mock_open(read_data=config_file_content) - with mock.patch("builtins.open", m): - with mock.patch.dict( - os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} - ): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, - api_endpoint=mock_api_endpoint, - ) - api_endpoint, cert_source = ( - client_class.get_mtls_endpoint_and_cert_source(options) - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is expected_cert_source + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): @@ -764,16 +1012,27 @@ def test_asset_service_client_get_mtls_endpoint_and_cert_source(client_class): # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_ENDPOINT assert cert_source is None # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source() + ) assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source @@ -783,27 +1042,48 @@ def test_asset_service_client_get_mtls_endpoint_and_cert_source(client_class): with pytest.raises(MutualTLSChannelError) as excinfo: client_class.get_mtls_endpoint_and_cert_source() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + -@pytest.mark.parametrize("client_class", [ - AssetServiceClient, AssetServiceAsyncClient -]) -@mock.patch.object(AssetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceClient)) 
-@mock.patch.object(AssetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceAsyncClient)) +@pytest.mark.parametrize("client_class", [AssetServiceClient, AssetServiceAsyncClient]) +@mock.patch.object( + AssetServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AssetServiceClient), +) +@mock.patch.object( + AssetServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AssetServiceAsyncClient), +) def test_asset_service_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" default_universe = AssetServiceClient._DEFAULT_UNIVERSE - default_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", @@ -826,11 +1106,19 @@ def test_asset_service_client_client_api_endpoint(client_class): universe_exists = hasattr(options, "universe_domain") if universe_exists: options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the _DEFAULT_ENDPOINT_TEMPLATE 
populated with GDU as the api endpoint. @@ -838,27 +1126,40 @@ def test_asset_service_client_client_api_endpoint(client_class): if hasattr(options, "universe_domain"): delattr(options, "universe_domain") with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) assert client.api_endpoint == default_endpoint -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest"), -]) -def test_asset_service_client_client_options_scopes(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), + ( + AssetServiceAsyncClient, + transports.AssetServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (AssetServiceClient, transports.AssetServiceRestTransport, "rest"), + ], +) +def test_asset_service_client_client_options_scopes( + client_class, transport_class, transport_name +): # Check the case scopes are provided. 
options = client_options.ClientOptions( scopes=["1", "2"], ) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -867,24 +1168,40 @@ def test_asset_service_client_client_options_scopes(client_class, transport_clas api_audience=None, ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", grpc_helpers), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest", None), -]) -def test_asset_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AssetServiceClient, + transports.AssetServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AssetServiceAsyncClient, + transports.AssetServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (AssetServiceClient, transports.AssetServiceRestTransport, "rest", None), + ], +) +def test_asset_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -893,12 +1210,13 @@ def test_asset_service_client_client_options_credentials_file(client_class, tran api_audience=None, ) + def test_asset_service_client_client_options_from_dict(): - with mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceGrpcTransport.__init__') as grpc_transport: + with mock.patch( + "google.cloud.asset_v1.services.asset_service.transports.AssetServiceGrpcTransport.__init__" + ) as grpc_transport: grpc_transport.return_value = None - client = AssetServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) + client = AssetServiceClient(client_options={"api_endpoint": "squid.clam.whelk"}) grpc_transport.assert_called_once_with( credentials=None, credentials_file=None, @@ -912,23 +1230,38 @@ def test_asset_service_client_client_options_from_dict(): ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", grpc_helpers), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_asset_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): 
+@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AssetServiceClient, + transports.AssetServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AssetServiceAsyncClient, + transports.AssetServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_asset_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -938,13 +1271,13 @@ def test_asset_service_client_create_channel_credentials_file(client_class, tran ) # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch.object(grpc_helpers, "create_channel") as create_channel, + ): creds = ga_credentials.AnonymousCredentials() file_creds = ga_credentials.AnonymousCredentials() load_creds.return_value = (file_creds, None) @@ -955,9 +1288,7 @@ def test_asset_service_client_create_channel_credentials_file(client_class, tran credentials=file_creds, credentials_file=None, quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), scopes=None, default_host="cloudasset.googleapis.com", ssl_credentials=None, @@ -968,11 +1299,14 @@ def test_asset_service_client_create_channel_credentials_file(client_class, tran ) -@pytest.mark.parametrize("request_type", [ - asset_service.ExportAssetsRequest, - dict, -]) -def test_export_assets(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.ExportAssetsRequest, + dict, + ], +) +def test_export_assets(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -983,11 +1317,9 @@ def test_export_assets(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.export_assets), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.export_assets(request) # Establish that the underlying gRPC stub method was called. @@ -1005,28 +1337,29 @@ def test_export_assets_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.ExportAssetsRequest( - parent='parent_value', + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_assets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.export_assets), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.export_assets(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.ExportAssetsRequest( - parent='parent_value', + parent="parent_value", ) + def test_export_assets_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1045,7 +1378,9 @@ def test_export_assets_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.export_assets] = mock_rpc request = {} client.export_assets(request) @@ -1064,8 +1399,11 @@ def test_export_assets_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_export_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_export_assets_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1079,12 +1417,17 @@ async def test_export_assets_async_use_cached_wrapped_rpc(transport: str = "grpc wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.export_assets in client._client._transport._wrapped_methods + assert ( + client._client._transport.export_assets + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.export_assets] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.export_assets + ] = mock_rpc request = {} await client.export_assets(request) @@ -1103,8 +1446,11 @@ async def test_export_assets_async_use_cached_wrapped_rpc(transport: str = "grpc assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_export_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.ExportAssetsRequest): +async def test_export_assets_async( + transport: str = "grpc_asyncio", request_type=asset_service.ExportAssetsRequest +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -1115,12 +1461,10 @@ async def 
test_export_assets_async(transport: str = 'grpc_asyncio', request_type request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.export_assets), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.export_assets(request) @@ -1138,6 +1482,7 @@ async def test_export_assets_async(transport: str = 'grpc_asyncio', request_type async def test_export_assets_async_from_dict(): await test_export_assets_async(request_type=dict) + def test_export_assets_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1147,13 +1492,11 @@ def test_export_assets_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.ExportAssetsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_assets), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.export_assets), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.export_assets(request) # Establish that the underlying gRPC stub method was called. @@ -1164,9 +1507,9 @@ def test_export_assets_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1179,13 +1522,13 @@ async def test_export_assets_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.ExportAssetsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_assets), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + with mock.patch.object(type(client.transport.export_assets), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.export_assets(request) # Establish that the underlying gRPC stub method was called. @@ -1196,16 +1539,19 @@ async def test_export_assets_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - asset_service.ListAssetsRequest, - dict, -]) -def test_list_assets(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.ListAssetsRequest, + dict, + ], +) +def test_list_assets(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1216,12 +1562,10 @@ def test_list_assets(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_assets), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = asset_service.ListAssetsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.list_assets(request) @@ -1233,7 +1577,7 @@ def test_list_assets(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListAssetsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_list_assets_non_empty_request_with_auto_populated_field(): @@ -1241,30 +1585,31 @@ def test_list_assets_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.ListAssetsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.list_assets), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_assets(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.ListAssetsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) + def test_list_assets_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1283,7 +1628,9 @@ def test_list_assets_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.list_assets] = mock_rpc request = {} client.list_assets(request) @@ -1297,8 +1644,11 @@ def test_list_assets_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_assets_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1312,12 +1662,17 @@ async def test_list_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_a wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_assets in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_assets + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - 
client._client._transport._wrapped_methods[client._client._transport.list_assets] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_assets + ] = mock_rpc request = {} await client.list_assets(request) @@ -1331,8 +1686,11 @@ async def test_list_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListAssetsRequest): +async def test_list_assets_async( + transport: str = "grpc_asyncio", request_type=asset_service.ListAssetsRequest +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -1343,13 +1701,13 @@ async def test_list_assets_async(transport: str = 'grpc_asyncio', request_type=a request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_assets), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListAssetsResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.ListAssetsResponse( + next_page_token="next_page_token_value", + ) + ) response = await client.list_assets(request) # Establish that the underlying gRPC stub method was called. @@ -1360,13 +1718,14 @@ async def test_list_assets_async(transport: str = 'grpc_asyncio', request_type=a # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListAssetsAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_list_assets_async_from_dict(): await test_list_assets_async(request_type=dict) + def test_list_assets_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1376,12 +1735,10 @@ def test_list_assets_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.ListAssetsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_assets), "__call__") as call: call.return_value = asset_service.ListAssetsResponse() client.list_assets(request) @@ -1393,9 +1750,9 @@ def test_list_assets_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1408,13 +1765,13 @@ async def test_list_assets_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.ListAssetsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListAssetsResponse()) + with mock.patch.object(type(client.transport.list_assets), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.ListAssetsResponse() + ) await client.list_assets(request) # Establish that the underlying gRPC stub method was called. @@ -1425,9 +1782,9 @@ async def test_list_assets_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_list_assets_flattened(): @@ -1436,15 +1793,13 @@ def test_list_assets_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_assets), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = asset_service.ListAssetsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.list_assets( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1452,7 +1807,7 @@ def test_list_assets_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -1466,9 +1821,10 @@ def test_list_assets_flattened_error(): with pytest.raises(ValueError): client.list_assets( asset_service.ListAssetsRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test_list_assets_flattened_async(): client = AssetServiceAsyncClient( @@ -1476,17 +1832,17 @@ async def test_list_assets_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_assets), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = asset_service.ListAssetsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListAssetsResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.ListAssetsResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.list_assets( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1494,9 +1850,10 @@ async def test_list_assets_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test_list_assets_flattened_error_async(): client = AssetServiceAsyncClient( @@ -1508,7 +1865,7 @@ async def test_list_assets_flattened_error_async(): with pytest.raises(ValueError): await client.list_assets( asset_service.ListAssetsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -1519,9 +1876,7 @@ def test_list_assets_pager(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_assets), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( asset_service.ListAssetsResponse( @@ -1530,17 +1885,17 @@ def test_list_assets_pager(transport_name: str = "grpc"): assets.Asset(), assets.Asset(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.ListAssetsResponse( assets=[], - next_page_token='def', + next_page_token="def", ), asset_service.ListAssetsResponse( assets=[ assets.Asset(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.ListAssetsResponse( assets=[ @@ -1555,9 +1910,7 @@ def test_list_assets_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_assets(request={}, retry=retry, timeout=timeout) @@ -1567,8 +1920,9 @@ def test_list_assets_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, assets.Asset) - for i in results) + assert all(isinstance(i, assets.Asset) for i in results) + + def test_list_assets_pages(transport_name: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1576,9 +1930,7 @@ def test_list_assets_pages(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_assets), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( asset_service.ListAssetsResponse( @@ -1587,17 +1939,17 @@ def test_list_assets_pages(transport_name: str = "grpc"): assets.Asset(), assets.Asset(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.ListAssetsResponse( assets=[], - next_page_token='def', + next_page_token="def", ), asset_service.ListAssetsResponse( assets=[ assets.Asset(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.ListAssetsResponse( assets=[ @@ -1608,9 +1960,10 @@ def test_list_assets_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.list_assets(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_list_assets_async_pager(): client = AssetServiceAsyncClient( @@ -1619,8 +1972,8 @@ async def test_list_assets_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_assets), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_assets), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( asset_service.ListAssetsResponse( @@ -1629,17 +1982,17 @@ async def test_list_assets_async_pager(): assets.Asset(), assets.Asset(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.ListAssetsResponse( assets=[], - next_page_token='def', + next_page_token="def", ), asset_service.ListAssetsResponse( assets=[ assets.Asset(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.ListAssetsResponse( assets=[ @@ -1649,15 +2002,16 @@ async def test_list_assets_async_pager(): ), RuntimeError, ) - async_pager = await client.list_assets(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.list_assets( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, assets.Asset) - for i in responses) + assert all(isinstance(i, assets.Asset) for i in responses) @pytest.mark.asyncio @@ -1668,8 +2022,8 @@ async def test_list_assets_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_assets), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_assets), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( asset_service.ListAssetsResponse( @@ -1678,17 +2032,17 @@ async def test_list_assets_async_pages(): assets.Asset(), assets.Asset(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.ListAssetsResponse( assets=[], - next_page_token='def', + next_page_token="def", ), asset_service.ListAssetsResponse( assets=[ assets.Asset(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.ListAssetsResponse( assets=[ @@ -1701,18 +2055,22 @@ async def test_list_assets_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.list_assets(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - asset_service.BatchGetAssetsHistoryRequest, - dict, -]) -def test_batch_get_assets_history(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + asset_service.BatchGetAssetsHistoryRequest, + dict, + ], +) +def test_batch_get_assets_history(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1724,11 +2082,10 @@ def test_batch_get_assets_history(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_get_assets_history), - '__call__') as call: + type(client.transport.batch_get_assets_history), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = asset_service.BatchGetAssetsHistoryResponse( - ) + call.return_value = asset_service.BatchGetAssetsHistoryResponse() response = client.batch_get_assets_history(request) # Establish that the underlying gRPC stub method was called. @@ -1746,28 +2103,31 @@ def test_batch_get_assets_history_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.BatchGetAssetsHistoryRequest( - parent='parent_value', + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_get_assets_history), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.batch_get_assets_history), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.batch_get_assets_history(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.BatchGetAssetsHistoryRequest( - parent='parent_value', + parent="parent_value", ) + def test_batch_get_assets_history_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1782,12 +2142,19 @@ def test_batch_get_assets_history_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.batch_get_assets_history in client._transport._wrapped_methods + assert ( + client._transport.batch_get_assets_history + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.batch_get_assets_history] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.batch_get_assets_history + ] = mock_rpc request = {} client.batch_get_assets_history(request) @@ -1800,8 +2167,11 @@ def test_batch_get_assets_history_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_batch_get_assets_history_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_batch_get_assets_history_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1815,12 +2185,17 @@ async def test_batch_get_assets_history_async_use_cached_wrapped_rpc(transport: wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.batch_get_assets_history in client._client._transport._wrapped_methods + assert ( + client._client._transport.batch_get_assets_history + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.batch_get_assets_history] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.batch_get_assets_history + ] = mock_rpc request = {} await client.batch_get_assets_history(request) @@ -1834,8 +2209,12 @@ async def test_batch_get_assets_history_async_use_cached_wrapped_rpc(transport: assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_batch_get_assets_history_async(transport: str = 'grpc_asyncio', request_type=asset_service.BatchGetAssetsHistoryRequest): +async def test_batch_get_assets_history_async( + transport: str = "grpc_asyncio", + request_type=asset_service.BatchGetAssetsHistoryRequest, +): client 
= AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -1847,11 +2226,12 @@ async def test_batch_get_assets_history_async(transport: str = 'grpc_asyncio', r # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_get_assets_history), - '__call__') as call: + type(client.transport.batch_get_assets_history), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetAssetsHistoryResponse( - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.BatchGetAssetsHistoryResponse() + ) response = await client.batch_get_assets_history(request) # Establish that the underlying gRPC stub method was called. @@ -1868,6 +2248,7 @@ async def test_batch_get_assets_history_async(transport: str = 'grpc_asyncio', r async def test_batch_get_assets_history_async_from_dict(): await test_batch_get_assets_history_async(request_type=dict) + def test_batch_get_assets_history_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1877,12 +2258,12 @@ def test_batch_get_assets_history_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.BatchGetAssetsHistoryRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_get_assets_history), - '__call__') as call: + type(client.transport.batch_get_assets_history), "__call__" + ) as call: call.return_value = asset_service.BatchGetAssetsHistoryResponse() client.batch_get_assets_history(request) @@ -1894,9 +2275,9 @@ def test_batch_get_assets_history_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1909,13 +2290,15 @@ async def test_batch_get_assets_history_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.BatchGetAssetsHistoryRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_get_assets_history), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetAssetsHistoryResponse()) + type(client.transport.batch_get_assets_history), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.BatchGetAssetsHistoryResponse() + ) await client.batch_get_assets_history(request) # Establish that the underlying gRPC stub method was called. @@ -1926,16 +2309,19 @@ async def test_batch_get_assets_history_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - asset_service.CreateFeedRequest, - dict, -]) -def test_create_feed(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.CreateFeedRequest, + dict, + ], +) +def test_create_feed(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1946,16 +2332,14 @@ def test_create_feed(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.create_feed), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], + relationship_types=["relationship_types_value"], ) response = client.create_feed(request) @@ -1967,11 +2351,11 @@ def test_create_feed(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] + assert response.name == "name_value" + assert response.asset_names == ["asset_names_value"] + assert response.asset_types == ["asset_types_value"] assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] + assert response.relationship_types == ["relationship_types_value"] def test_create_feed_non_empty_request_with_auto_populated_field(): @@ -1979,30 +2363,31 @@ def test_create_feed_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
request = asset_service.CreateFeedRequest( - parent='parent_value', - feed_id='feed_id_value', + parent="parent_value", + feed_id="feed_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.create_feed), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_feed(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.CreateFeedRequest( - parent='parent_value', - feed_id='feed_id_value', + parent="parent_value", + feed_id="feed_id_value", ) + def test_create_feed_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2021,7 +2406,9 @@ def test_create_feed_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.create_feed] = mock_rpc request = {} client.create_feed(request) @@ -2035,8 +2422,11 @@ def test_create_feed_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_create_feed_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2050,12 +2440,17 @@ async def test_create_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_a wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.create_feed in client._client._transport._wrapped_methods + assert ( + client._client._transport.create_feed + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_feed] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.create_feed + ] = mock_rpc request = {} await client.create_feed(request) @@ -2069,8 +2464,11 @@ async def test_create_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.CreateFeedRequest): +async def test_create_feed_async( + transport: str = "grpc_asyncio", request_type=asset_service.CreateFeedRequest +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2081,17 +2479,17 @@ async def test_create_feed_async(transport: str = 'grpc_asyncio', 
request_type=a request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.create_feed), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.Feed( + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=["relationship_types_value"], + ) + ) response = await client.create_feed(request) # Establish that the underlying gRPC stub method was called. @@ -2102,17 +2500,18 @@ async def test_create_feed_async(transport: str = 'grpc_asyncio', request_type=a # Establish that the response is the type that we expect. 
assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] + assert response.name == "name_value" + assert response.asset_names == ["asset_names_value"] + assert response.asset_types == ["asset_types_value"] assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] + assert response.relationship_types == ["relationship_types_value"] @pytest.mark.asyncio async def test_create_feed_async_from_dict(): await test_create_feed_async(request_type=dict) + def test_create_feed_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2122,12 +2521,10 @@ def test_create_feed_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.CreateFeedRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.create_feed), "__call__") as call: call.return_value = asset_service.Feed() client.create_feed(request) @@ -2139,9 +2536,9 @@ def test_create_feed_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2154,12 +2551,10 @@ async def test_create_feed_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.CreateFeedRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.create_feed), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed()) await client.create_feed(request) @@ -2171,9 +2566,9 @@ async def test_create_feed_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_create_feed_flattened(): @@ -2182,15 +2577,13 @@ def test_create_feed_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.create_feed), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = asset_service.Feed() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_feed( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -2198,7 +2591,7 @@ def test_create_feed_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -2212,9 +2605,10 @@ def test_create_feed_flattened_error(): with pytest.raises(ValueError): client.create_feed( asset_service.CreateFeedRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test_create_feed_flattened_async(): client = AssetServiceAsyncClient( @@ -2222,9 +2616,7 @@ async def test_create_feed_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.create_feed), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = asset_service.Feed() @@ -2232,7 +2624,7 @@ async def test_create_feed_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_feed( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -2240,9 +2632,10 @@ async def test_create_feed_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test_create_feed_flattened_error_async(): client = AssetServiceAsyncClient( @@ -2254,15 +2647,18 @@ async def test_create_feed_flattened_error_async(): with pytest.raises(ValueError): await client.create_feed( asset_service.CreateFeedRequest(), - parent='parent_value', + parent="parent_value", ) -@pytest.mark.parametrize("request_type", [ - asset_service.GetFeedRequest, - dict, -]) -def test_get_feed(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.GetFeedRequest, + dict, + ], +) +def test_get_feed(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2273,16 +2669,14 @@ def test_get_feed(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.get_feed), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], + relationship_types=["relationship_types_value"], ) response = client.get_feed(request) @@ -2294,11 +2688,11 @@ def test_get_feed(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] + assert response.name == "name_value" + assert response.asset_names == ["asset_names_value"] + assert response.asset_types == ["asset_types_value"] assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] + assert response.relationship_types == ["relationship_types_value"] def test_get_feed_non_empty_request_with_auto_populated_field(): @@ -2306,28 +2700,29 @@ def test_get_feed_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.GetFeedRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ with mock.patch.object(type(client.transport.get_feed), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_feed(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.GetFeedRequest( - name='name_value', + name="name_value", ) + def test_get_feed_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2346,7 +2741,9 @@ def test_get_feed_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.get_feed] = mock_rpc request = {} client.get_feed(request) @@ -2360,6 +2757,7 @@ def test_get_feed_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_get_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2375,12 +2773,17 @@ async def test_get_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_feed in client._client._transport._wrapped_methods + assert ( + client._client._transport.get_feed + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_feed] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.get_feed + ] = mock_rpc request = {} await 
client.get_feed(request) @@ -2394,8 +2797,11 @@ async def test_get_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.GetFeedRequest): +async def test_get_feed_async( + transport: str = "grpc_asyncio", request_type=asset_service.GetFeedRequest +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2406,17 +2812,17 @@ async def test_get_feed_async(transport: str = 'grpc_asyncio', request_type=asse request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.get_feed), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.Feed( + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=["relationship_types_value"], + ) + ) response = await client.get_feed(request) # Establish that the underlying gRPC stub method was called. @@ -2427,17 +2833,18 @@ async def test_get_feed_async(transport: str = 'grpc_asyncio', request_type=asse # Establish that the response is the type that we expect. 
assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] + assert response.name == "name_value" + assert response.asset_names == ["asset_names_value"] + assert response.asset_types == ["asset_types_value"] assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] + assert response.relationship_types == ["relationship_types_value"] @pytest.mark.asyncio async def test_get_feed_async_from_dict(): await test_get_feed_async(request_type=dict) + def test_get_feed_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2447,12 +2854,10 @@ def test_get_feed_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.GetFeedRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.get_feed), "__call__") as call: call.return_value = asset_service.Feed() client.get_feed(request) @@ -2464,9 +2869,9 @@ def test_get_feed_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2479,12 +2884,10 @@ async def test_get_feed_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.GetFeedRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.get_feed), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed()) await client.get_feed(request) @@ -2496,9 +2899,9 @@ async def test_get_feed_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_get_feed_flattened(): @@ -2507,15 +2910,13 @@ def test_get_feed_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.get_feed), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = asset_service.Feed() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_feed( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -2523,7 +2924,7 @@ def test_get_feed_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -2537,9 +2938,10 @@ def test_get_feed_flattened_error(): with pytest.raises(ValueError): client.get_feed( asset_service.GetFeedRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_get_feed_flattened_async(): client = AssetServiceAsyncClient( @@ -2547,9 +2949,7 @@ async def test_get_feed_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.get_feed), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = asset_service.Feed() @@ -2557,7 +2957,7 @@ async def test_get_feed_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_feed( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -2565,9 +2965,10 @@ async def test_get_feed_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_get_feed_flattened_error_async(): client = AssetServiceAsyncClient( @@ -2579,15 +2980,18 @@ async def test_get_feed_flattened_error_async(): with pytest.raises(ValueError): await client.get_feed( asset_service.GetFeedRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - asset_service.ListFeedsRequest, - dict, -]) -def test_list_feeds(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.ListFeedsRequest, + dict, + ], +) +def test_list_feeds(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2598,12 +3002,9 @@ def test_list_feeds(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: + with mock.patch.object(type(client.transport.list_feeds), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = asset_service.ListFeedsResponse( - ) + call.return_value = asset_service.ListFeedsResponse() response = client.list_feeds(request) # Establish that the underlying gRPC stub method was called. @@ -2621,28 +3022,29 @@ def test_list_feeds_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.ListFeedsRequest( - parent='parent_value', + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.list_feeds), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_feeds(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.ListFeedsRequest( - parent='parent_value', + parent="parent_value", ) + def test_list_feeds_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2661,7 +3063,9 @@ def test_list_feeds_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.list_feeds] = mock_rpc request = {} client.list_feeds(request) @@ -2675,6 +3079,7 @@ def test_list_feeds_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_list_feeds_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2690,12 +3095,17 @@ async def test_list_feeds_async_use_cached_wrapped_rpc(transport: str = "grpc_as wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_feeds in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_feeds + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_feeds] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_feeds + ] = mock_rpc request = {} await client.list_feeds(request) @@ -2709,8 +3119,11 @@ async def test_list_feeds_async_use_cached_wrapped_rpc(transport: str = "grpc_as assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_feeds_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListFeedsRequest): +async def test_list_feeds_async( + transport: str = "grpc_asyncio", request_type=asset_service.ListFeedsRequest +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2721,12 +3134,11 @@ async def test_list_feeds_async(transport: str = 'grpc_asyncio', request_type=as request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: + with mock.patch.object(type(client.transport.list_feeds), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListFeedsResponse( - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.ListFeedsResponse() + ) response = await client.list_feeds(request) # Establish that the underlying gRPC stub method was called. @@ -2743,6 +3155,7 @@ async def test_list_feeds_async(transport: str = 'grpc_asyncio', request_type=as async def test_list_feeds_async_from_dict(): await test_list_feeds_async(request_type=dict) + def test_list_feeds_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2752,12 +3165,10 @@ def test_list_feeds_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.ListFeedsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: + with mock.patch.object(type(client.transport.list_feeds), "__call__") as call: call.return_value = asset_service.ListFeedsResponse() client.list_feeds(request) @@ -2769,9 +3180,9 @@ def test_list_feeds_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2784,13 +3195,13 @@ async def test_list_feeds_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.ListFeedsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListFeedsResponse()) + with mock.patch.object(type(client.transport.list_feeds), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.ListFeedsResponse() + ) await client.list_feeds(request) # Establish that the underlying gRPC stub method was called. @@ -2801,9 +3212,9 @@ async def test_list_feeds_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_list_feeds_flattened(): @@ -2812,15 +3223,13 @@ def test_list_feeds_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: + with mock.patch.object(type(client.transport.list_feeds), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = asset_service.ListFeedsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.list_feeds( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -2828,7 +3237,7 @@ def test_list_feeds_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -2842,9 +3251,10 @@ def test_list_feeds_flattened_error(): with pytest.raises(ValueError): client.list_feeds( asset_service.ListFeedsRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test_list_feeds_flattened_async(): client = AssetServiceAsyncClient( @@ -2852,17 +3262,17 @@ async def test_list_feeds_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: + with mock.patch.object(type(client.transport.list_feeds), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = asset_service.ListFeedsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListFeedsResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.ListFeedsResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.list_feeds( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -2870,9 +3280,10 @@ async def test_list_feeds_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test_list_feeds_flattened_error_async(): client = AssetServiceAsyncClient( @@ -2884,15 +3295,18 @@ async def test_list_feeds_flattened_error_async(): with pytest.raises(ValueError): await client.list_feeds( asset_service.ListFeedsRequest(), - parent='parent_value', + parent="parent_value", ) -@pytest.mark.parametrize("request_type", [ - asset_service.UpdateFeedRequest, - dict, -]) -def test_update_feed(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.UpdateFeedRequest, + dict, + ], +) +def test_update_feed(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2903,16 +3317,14 @@ def test_update_feed(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.update_feed), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], + relationship_types=["relationship_types_value"], ) response = client.update_feed(request) @@ -2924,11 +3336,11 @@ def test_update_feed(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] + assert response.name == "name_value" + assert response.asset_names == ["asset_names_value"] + assert response.asset_types == ["asset_types_value"] assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] + assert response.relationship_types == ["relationship_types_value"] def test_update_feed_non_empty_request_with_auto_populated_field(): @@ -2936,25 +3348,24 @@ def test_update_feed_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = asset_service.UpdateFeedRequest( - ) + request = asset_service.UpdateFeedRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ with mock.patch.object(type(client.transport.update_feed), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_feed(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.UpdateFeedRequest( - ) + assert args[0] == asset_service.UpdateFeedRequest() + def test_update_feed_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2974,7 +3385,9 @@ def test_update_feed_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.update_feed] = mock_rpc request = {} client.update_feed(request) @@ -2988,8 +3401,11 @@ def test_update_feed_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_update_feed_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -3003,12 +3419,17 @@ async def test_update_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_a wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.update_feed in client._client._transport._wrapped_methods + assert ( + client._client._transport.update_feed + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = 
mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_feed] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.update_feed + ] = mock_rpc request = {} await client.update_feed(request) @@ -3022,8 +3443,11 @@ async def test_update_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.UpdateFeedRequest): +async def test_update_feed_async( + transport: str = "grpc_asyncio", request_type=asset_service.UpdateFeedRequest +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -3034,17 +3458,17 @@ async def test_update_feed_async(transport: str = 'grpc_asyncio', request_type=a request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.update_feed), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.Feed( + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=["relationship_types_value"], + ) + ) response = await client.update_feed(request) # Establish that the underlying gRPC stub method was called. 
@@ -3055,17 +3479,18 @@ async def test_update_feed_async(transport: str = 'grpc_asyncio', request_type=a # Establish that the response is the type that we expect. assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] + assert response.name == "name_value" + assert response.asset_names == ["asset_names_value"] + assert response.asset_types == ["asset_types_value"] assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] + assert response.relationship_types == ["relationship_types_value"] @pytest.mark.asyncio async def test_update_feed_async_from_dict(): await test_update_feed_async(request_type=dict) + def test_update_feed_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3075,12 +3500,10 @@ def test_update_feed_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.UpdateFeedRequest() - request.feed.name = 'name_value' + request.feed.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.update_feed), "__call__") as call: call.return_value = asset_service.Feed() client.update_feed(request) @@ -3092,9 +3515,9 @@ def test_update_feed_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'feed.name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "feed.name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -3107,12 +3530,10 @@ async def test_update_feed_field_headers_async(): # a field header. Set these to a non-empty value. 
request = asset_service.UpdateFeedRequest() - request.feed.name = 'name_value' + request.feed.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.update_feed), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed()) await client.update_feed(request) @@ -3124,9 +3545,9 @@ async def test_update_feed_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'feed.name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "feed.name=name_value", + ) in kw["metadata"] def test_update_feed_flattened(): @@ -3135,15 +3556,13 @@ def test_update_feed_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.update_feed), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = asset_service.Feed() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.update_feed( - feed=asset_service.Feed(name='name_value'), + feed=asset_service.Feed(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -3151,7 +3570,7 @@ def test_update_feed_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].feed - mock_val = asset_service.Feed(name='name_value') + mock_val = asset_service.Feed(name="name_value") assert arg == mock_val @@ -3165,9 +3584,10 @@ def test_update_feed_flattened_error(): with pytest.raises(ValueError): client.update_feed( asset_service.UpdateFeedRequest(), - feed=asset_service.Feed(name='name_value'), + feed=asset_service.Feed(name="name_value"), ) + @pytest.mark.asyncio async def test_update_feed_flattened_async(): client = AssetServiceAsyncClient( @@ -3175,9 +3595,7 @@ async def test_update_feed_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.update_feed), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = asset_service.Feed() @@ -3185,7 +3603,7 @@ async def test_update_feed_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.update_feed( - feed=asset_service.Feed(name='name_value'), + feed=asset_service.Feed(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -3193,9 +3611,10 @@ async def test_update_feed_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].feed - mock_val = asset_service.Feed(name='name_value') + mock_val = asset_service.Feed(name="name_value") assert arg == mock_val + @pytest.mark.asyncio async def test_update_feed_flattened_error_async(): client = AssetServiceAsyncClient( @@ -3207,15 +3626,18 @@ async def test_update_feed_flattened_error_async(): with pytest.raises(ValueError): await client.update_feed( asset_service.UpdateFeedRequest(), - feed=asset_service.Feed(name='name_value'), + feed=asset_service.Feed(name="name_value"), ) -@pytest.mark.parametrize("request_type", [ - asset_service.DeleteFeedRequest, - dict, -]) -def test_delete_feed(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.DeleteFeedRequest, + dict, + ], +) +def test_delete_feed(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3226,9 +3648,7 @@ def test_delete_feed(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_feed), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None response = client.delete_feed(request) @@ -3248,28 +3668,29 @@ def test_delete_feed_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. 
client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.DeleteFeedRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.delete_feed), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_feed(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.DeleteFeedRequest( - name='name_value', + name="name_value", ) + def test_delete_feed_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3288,7 +3709,9 @@ def test_delete_feed_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.delete_feed] = mock_rpc request = {} client.delete_feed(request) @@ -3302,8 +3725,11 @@ def test_delete_feed_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_delete_feed_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -3317,12 +3743,17 @@ async def test_delete_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_a wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.delete_feed in client._client._transport._wrapped_methods + assert ( + client._client._transport.delete_feed + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_feed] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.delete_feed + ] = mock_rpc request = {} await client.delete_feed(request) @@ -3336,8 +3767,11 @@ async def test_delete_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.DeleteFeedRequest): +async def test_delete_feed_async( + transport: str = "grpc_asyncio", request_type=asset_service.DeleteFeedRequest +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -3348,9 +3782,7 @@ async def test_delete_feed_async(transport: str = 'grpc_asyncio', 
request_type=a request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_feed), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.delete_feed(request) @@ -3369,6 +3801,7 @@ async def test_delete_feed_async(transport: str = 'grpc_asyncio', request_type=a async def test_delete_feed_async_from_dict(): await test_delete_feed_async(request_type=dict) + def test_delete_feed_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3378,12 +3811,10 @@ def test_delete_feed_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.DeleteFeedRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_feed), "__call__") as call: call.return_value = None client.delete_feed(request) @@ -3395,9 +3826,9 @@ def test_delete_feed_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -3410,12 +3841,10 @@ async def test_delete_feed_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.DeleteFeedRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_feed), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_feed(request) @@ -3427,9 +3856,9 @@ async def test_delete_feed_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_delete_feed_flattened(): @@ -3438,15 +3867,13 @@ def test_delete_feed_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_feed), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_feed( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -3454,7 +3881,7 @@ def test_delete_feed_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -3468,9 +3895,10 @@ def test_delete_feed_flattened_error(): with pytest.raises(ValueError): client.delete_feed( asset_service.DeleteFeedRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_delete_feed_flattened_async(): client = AssetServiceAsyncClient( @@ -3478,9 +3906,7 @@ async def test_delete_feed_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_feed), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -3488,7 +3914,7 @@ async def test_delete_feed_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.delete_feed( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -3496,9 +3922,10 @@ async def test_delete_feed_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_delete_feed_flattened_error_async(): client = AssetServiceAsyncClient( @@ -3510,15 +3937,18 @@ async def test_delete_feed_flattened_error_async(): with pytest.raises(ValueError): await client.delete_feed( asset_service.DeleteFeedRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - asset_service.SearchAllResourcesRequest, - dict, -]) -def test_search_all_resources(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.SearchAllResourcesRequest, + dict, + ], +) +def test_search_all_resources(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3530,11 +3960,11 @@ def test_search_all_resources(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: + type(client.transport.search_all_resources), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = asset_service.SearchAllResourcesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.search_all_resources(request) @@ -3546,7 +3976,7 @@ def test_search_all_resources(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.SearchAllResourcesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_search_all_resources_non_empty_request_with_auto_populated_field(): @@ -3554,34 +3984,37 @@ def test_search_all_resources_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.SearchAllResourcesRequest( - scope='scope_value', - query='query_value', - page_token='page_token_value', - order_by='order_by_value', + scope="scope_value", + query="query_value", + page_token="page_token_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.search_all_resources), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.search_all_resources(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.SearchAllResourcesRequest( - scope='scope_value', - query='query_value', - page_token='page_token_value', - order_by='order_by_value', + scope="scope_value", + query="query_value", + page_token="page_token_value", + order_by="order_by_value", ) + def test_search_all_resources_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3596,12 +4029,18 @@ def test_search_all_resources_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.search_all_resources in client._transport._wrapped_methods + assert ( + client._transport.search_all_resources in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.search_all_resources] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.search_all_resources] = ( + mock_rpc + ) request = {} client.search_all_resources(request) @@ -3614,8 +4053,11 @@ def test_search_all_resources_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_search_all_resources_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_search_all_resources_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -3629,12 +4071,17 @@ async def test_search_all_resources_async_use_cached_wrapped_rpc(transport: str wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.search_all_resources in client._client._transport._wrapped_methods + assert ( + client._client._transport.search_all_resources + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.search_all_resources] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.search_all_resources + ] = mock_rpc request = {} await client.search_all_resources(request) @@ -3648,8 +4095,12 @@ async def test_search_all_resources_async_use_cached_wrapped_rpc(transport: str assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_search_all_resources_async(transport: str = 'grpc_asyncio', request_type=asset_service.SearchAllResourcesRequest): +async def test_search_all_resources_async( + transport: str = "grpc_asyncio", + request_type=asset_service.SearchAllResourcesRequest, +): client = AssetServiceAsyncClient( 
credentials=async_anonymous_credentials(), transport=transport, @@ -3661,12 +4112,14 @@ async def test_search_all_resources_async(transport: str = 'grpc_asyncio', reque # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: + type(client.transport.search_all_resources), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllResourcesResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SearchAllResourcesResponse( + next_page_token="next_page_token_value", + ) + ) response = await client.search_all_resources(request) # Establish that the underlying gRPC stub method was called. @@ -3677,13 +4130,14 @@ async def test_search_all_resources_async(transport: str = 'grpc_asyncio', reque # Establish that the response is the type that we expect. assert isinstance(response, pagers.SearchAllResourcesAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_search_all_resources_async_from_dict(): await test_search_all_resources_async(request_type=dict) + def test_search_all_resources_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3693,12 +4147,12 @@ def test_search_all_resources_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.SearchAllResourcesRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: + type(client.transport.search_all_resources), "__call__" + ) as call: call.return_value = asset_service.SearchAllResourcesResponse() client.search_all_resources(request) @@ -3710,9 +4164,9 @@ def test_search_all_resources_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "scope=scope_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -3725,13 +4179,15 @@ async def test_search_all_resources_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.SearchAllResourcesRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllResourcesResponse()) + type(client.transport.search_all_resources), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SearchAllResourcesResponse() + ) await client.search_all_resources(request) # Establish that the underlying gRPC stub method was called. @@ -3742,9 +4198,9 @@ async def test_search_all_resources_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "scope=scope_value", + ) in kw["metadata"] def test_search_all_resources_flattened(): @@ -3754,16 +4210,16 @@ def test_search_all_resources_flattened(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: + type(client.transport.search_all_resources), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.SearchAllResourcesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.search_all_resources( - scope='scope_value', - query='query_value', - asset_types=['asset_types_value'], + scope="scope_value", + query="query_value", + asset_types=["asset_types_value"], ) # Establish that the underlying call was made with the expected @@ -3771,13 +4227,13 @@ def test_search_all_resources_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].scope - mock_val = 'scope_value' + mock_val = "scope_value" assert arg == mock_val arg = args[0].query - mock_val = 'query_value' + mock_val = "query_value" assert arg == mock_val arg = args[0].asset_types - mock_val = ['asset_types_value'] + mock_val = ["asset_types_value"] assert arg == mock_val @@ -3791,11 +4247,12 @@ def test_search_all_resources_flattened_error(): with pytest.raises(ValueError): client.search_all_resources( asset_service.SearchAllResourcesRequest(), - scope='scope_value', - query='query_value', - asset_types=['asset_types_value'], + scope="scope_value", + query="query_value", + asset_types=["asset_types_value"], ) + @pytest.mark.asyncio async def test_search_all_resources_flattened_async(): client = AssetServiceAsyncClient( @@ -3804,18 +4261,20 @@ async def test_search_all_resources_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: + type(client.transport.search_all_resources), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = asset_service.SearchAllResourcesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllResourcesResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SearchAllResourcesResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.search_all_resources( - scope='scope_value', - query='query_value', - asset_types=['asset_types_value'], + scope="scope_value", + query="query_value", + asset_types=["asset_types_value"], ) # Establish that the underlying call was made with the expected @@ -3823,15 +4282,16 @@ async def test_search_all_resources_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].scope - mock_val = 'scope_value' + mock_val = "scope_value" assert arg == mock_val arg = args[0].query - mock_val = 'query_value' + mock_val = "query_value" assert arg == mock_val arg = args[0].asset_types - mock_val = ['asset_types_value'] + mock_val = ["asset_types_value"] assert arg == mock_val + @pytest.mark.asyncio async def test_search_all_resources_flattened_error_async(): client = AssetServiceAsyncClient( @@ -3843,9 +4303,9 @@ async def test_search_all_resources_flattened_error_async(): with pytest.raises(ValueError): await client.search_all_resources( asset_service.SearchAllResourcesRequest(), - scope='scope_value', - query='query_value', - asset_types=['asset_types_value'], + scope="scope_value", + query="query_value", + asset_types=["asset_types_value"], ) @@ -3857,8 +4317,8 @@ def test_search_all_resources_pager(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: + type(client.transport.search_all_resources), "__call__" + ) as call: # Set the response to a series of pages. 
call.side_effect = ( asset_service.SearchAllResourcesResponse( @@ -3867,17 +4327,17 @@ def test_search_all_resources_pager(transport_name: str = "grpc"): assets.ResourceSearchResult(), assets.ResourceSearchResult(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.SearchAllResourcesResponse( results=[], - next_page_token='def', + next_page_token="def", ), asset_service.SearchAllResourcesResponse( results=[ assets.ResourceSearchResult(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.SearchAllResourcesResponse( results=[ @@ -3892,9 +4352,7 @@ def test_search_all_resources_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('scope', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", ""),)), ) pager = client.search_all_resources(request={}, retry=retry, timeout=timeout) @@ -3904,8 +4362,9 @@ def test_search_all_resources_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, assets.ResourceSearchResult) - for i in results) + assert all(isinstance(i, assets.ResourceSearchResult) for i in results) + + def test_search_all_resources_pages(transport_name: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3914,8 +4373,8 @@ def test_search_all_resources_pages(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: + type(client.transport.search_all_resources), "__call__" + ) as call: # Set the response to a series of pages. 
call.side_effect = ( asset_service.SearchAllResourcesResponse( @@ -3924,17 +4383,17 @@ def test_search_all_resources_pages(transport_name: str = "grpc"): assets.ResourceSearchResult(), assets.ResourceSearchResult(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.SearchAllResourcesResponse( results=[], - next_page_token='def', + next_page_token="def", ), asset_service.SearchAllResourcesResponse( results=[ assets.ResourceSearchResult(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.SearchAllResourcesResponse( results=[ @@ -3945,9 +4404,10 @@ def test_search_all_resources_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.search_all_resources(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_search_all_resources_async_pager(): client = AssetServiceAsyncClient( @@ -3956,8 +4416,10 @@ async def test_search_all_resources_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_resources), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.search_all_resources), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. 
call.side_effect = ( asset_service.SearchAllResourcesResponse( @@ -3966,17 +4428,17 @@ async def test_search_all_resources_async_pager(): assets.ResourceSearchResult(), assets.ResourceSearchResult(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.SearchAllResourcesResponse( results=[], - next_page_token='def', + next_page_token="def", ), asset_service.SearchAllResourcesResponse( results=[ assets.ResourceSearchResult(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.SearchAllResourcesResponse( results=[ @@ -3986,15 +4448,16 @@ async def test_search_all_resources_async_pager(): ), RuntimeError, ) - async_pager = await client.search_all_resources(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.search_all_resources( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, assets.ResourceSearchResult) - for i in responses) + assert all(isinstance(i, assets.ResourceSearchResult) for i in responses) @pytest.mark.asyncio @@ -4005,8 +4468,10 @@ async def test_search_all_resources_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_resources), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.search_all_resources), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. 
call.side_effect = ( asset_service.SearchAllResourcesResponse( @@ -4015,17 +4480,17 @@ async def test_search_all_resources_async_pages(): assets.ResourceSearchResult(), assets.ResourceSearchResult(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.SearchAllResourcesResponse( results=[], - next_page_token='def', + next_page_token="def", ), asset_service.SearchAllResourcesResponse( results=[ assets.ResourceSearchResult(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.SearchAllResourcesResponse( results=[ @@ -4038,18 +4503,22 @@ async def test_search_all_resources_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.search_all_resources(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - asset_service.SearchAllIamPoliciesRequest, - dict, -]) -def test_search_all_iam_policies(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + asset_service.SearchAllIamPoliciesRequest, + dict, + ], +) +def test_search_all_iam_policies(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4061,11 +4530,11 @@ def test_search_all_iam_policies(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: + type(client.transport.search_all_iam_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.SearchAllIamPoliciesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.search_all_iam_policies(request) @@ -4077,7 +4546,7 @@ def test_search_all_iam_policies(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.SearchAllIamPoliciesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_search_all_iam_policies_non_empty_request_with_auto_populated_field(): @@ -4085,34 +4554,37 @@ def test_search_all_iam_policies_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.SearchAllIamPoliciesRequest( - scope='scope_value', - query='query_value', - page_token='page_token_value', - order_by='order_by_value', + scope="scope_value", + query="query_value", + page_token="page_token_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ type(client.transport.search_all_iam_policies), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.search_all_iam_policies(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.SearchAllIamPoliciesRequest( - scope='scope_value', - query='query_value', - page_token='page_token_value', - order_by='order_by_value', + scope="scope_value", + query="query_value", + page_token="page_token_value", + order_by="order_by_value", ) + def test_search_all_iam_policies_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -4127,12 +4599,19 @@ def test_search_all_iam_policies_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.search_all_iam_policies in client._transport._wrapped_methods + assert ( + client._transport.search_all_iam_policies + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.search_all_iam_policies] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.search_all_iam_policies + ] = mock_rpc request = {} client.search_all_iam_policies(request) @@ -4145,8 +4624,11 @@ def test_search_all_iam_policies_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_search_all_iam_policies_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_search_all_iam_policies_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -4160,12 +4642,17 @@ async def test_search_all_iam_policies_async_use_cached_wrapped_rpc(transport: s wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.search_all_iam_policies in client._client._transport._wrapped_methods + assert ( + client._client._transport.search_all_iam_policies + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.search_all_iam_policies] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.search_all_iam_policies + ] = mock_rpc request = {} await client.search_all_iam_policies(request) @@ -4179,8 +4666,12 @@ async def test_search_all_iam_policies_async_use_cached_wrapped_rpc(transport: s assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_search_all_iam_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.SearchAllIamPoliciesRequest): +async def test_search_all_iam_policies_async( + transport: str = "grpc_asyncio", + request_type=asset_service.SearchAllIamPoliciesRequest, +): client = 
AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -4192,12 +4683,14 @@ async def test_search_all_iam_policies_async(transport: str = 'grpc_asyncio', re # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: + type(client.transport.search_all_iam_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllIamPoliciesResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SearchAllIamPoliciesResponse( + next_page_token="next_page_token_value", + ) + ) response = await client.search_all_iam_policies(request) # Establish that the underlying gRPC stub method was called. @@ -4208,13 +4701,14 @@ async def test_search_all_iam_policies_async(transport: str = 'grpc_asyncio', re # Establish that the response is the type that we expect. assert isinstance(response, pagers.SearchAllIamPoliciesAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_search_all_iam_policies_async_from_dict(): await test_search_all_iam_policies_async(request_type=dict) + def test_search_all_iam_policies_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4224,12 +4718,12 @@ def test_search_all_iam_policies_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.SearchAllIamPoliciesRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: + type(client.transport.search_all_iam_policies), "__call__" + ) as call: call.return_value = asset_service.SearchAllIamPoliciesResponse() client.search_all_iam_policies(request) @@ -4241,9 +4735,9 @@ def test_search_all_iam_policies_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "scope=scope_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -4256,13 +4750,15 @@ async def test_search_all_iam_policies_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.SearchAllIamPoliciesRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllIamPoliciesResponse()) + type(client.transport.search_all_iam_policies), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SearchAllIamPoliciesResponse() + ) await client.search_all_iam_policies(request) # Establish that the underlying gRPC stub method was called. @@ -4273,9 +4769,9 @@ async def test_search_all_iam_policies_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "scope=scope_value", + ) in kw["metadata"] def test_search_all_iam_policies_flattened(): @@ -4285,15 +4781,15 @@ def test_search_all_iam_policies_flattened(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: + type(client.transport.search_all_iam_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.SearchAllIamPoliciesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.search_all_iam_policies( - scope='scope_value', - query='query_value', + scope="scope_value", + query="query_value", ) # Establish that the underlying call was made with the expected @@ -4301,10 +4797,10 @@ def test_search_all_iam_policies_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].scope - mock_val = 'scope_value' + mock_val = "scope_value" assert arg == mock_val arg = args[0].query - mock_val = 'query_value' + mock_val = "query_value" assert arg == mock_val @@ -4318,10 +4814,11 @@ def test_search_all_iam_policies_flattened_error(): with pytest.raises(ValueError): client.search_all_iam_policies( asset_service.SearchAllIamPoliciesRequest(), - scope='scope_value', - query='query_value', + scope="scope_value", + query="query_value", ) + @pytest.mark.asyncio async def test_search_all_iam_policies_flattened_async(): client = AssetServiceAsyncClient( @@ -4330,17 +4827,19 @@ async def test_search_all_iam_policies_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: + type(client.transport.search_all_iam_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = asset_service.SearchAllIamPoliciesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllIamPoliciesResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SearchAllIamPoliciesResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.search_all_iam_policies( - scope='scope_value', - query='query_value', + scope="scope_value", + query="query_value", ) # Establish that the underlying call was made with the expected @@ -4348,12 +4847,13 @@ async def test_search_all_iam_policies_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].scope - mock_val = 'scope_value' + mock_val = "scope_value" assert arg == mock_val arg = args[0].query - mock_val = 'query_value' + mock_val = "query_value" assert arg == mock_val + @pytest.mark.asyncio async def test_search_all_iam_policies_flattened_error_async(): client = AssetServiceAsyncClient( @@ -4365,8 +4865,8 @@ async def test_search_all_iam_policies_flattened_error_async(): with pytest.raises(ValueError): await client.search_all_iam_policies( asset_service.SearchAllIamPoliciesRequest(), - scope='scope_value', - query='query_value', + scope="scope_value", + query="query_value", ) @@ -4378,8 +4878,8 @@ def test_search_all_iam_policies_pager(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: + type(client.transport.search_all_iam_policies), "__call__" + ) as call: # Set the response to a series of pages. 
call.side_effect = ( asset_service.SearchAllIamPoliciesResponse( @@ -4388,17 +4888,17 @@ def test_search_all_iam_policies_pager(transport_name: str = "grpc"): assets.IamPolicySearchResult(), assets.IamPolicySearchResult(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.SearchAllIamPoliciesResponse( results=[], - next_page_token='def', + next_page_token="def", ), asset_service.SearchAllIamPoliciesResponse( results=[ assets.IamPolicySearchResult(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.SearchAllIamPoliciesResponse( results=[ @@ -4413,9 +4913,7 @@ def test_search_all_iam_policies_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('scope', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", ""),)), ) pager = client.search_all_iam_policies(request={}, retry=retry, timeout=timeout) @@ -4425,8 +4923,9 @@ def test_search_all_iam_policies_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, assets.IamPolicySearchResult) - for i in results) + assert all(isinstance(i, assets.IamPolicySearchResult) for i in results) + + def test_search_all_iam_policies_pages(transport_name: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4435,8 +4934,8 @@ def test_search_all_iam_policies_pages(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: + type(client.transport.search_all_iam_policies), "__call__" + ) as call: # Set the response to a series of pages. 
call.side_effect = ( asset_service.SearchAllIamPoliciesResponse( @@ -4445,17 +4944,17 @@ def test_search_all_iam_policies_pages(transport_name: str = "grpc"): assets.IamPolicySearchResult(), assets.IamPolicySearchResult(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.SearchAllIamPoliciesResponse( results=[], - next_page_token='def', + next_page_token="def", ), asset_service.SearchAllIamPoliciesResponse( results=[ assets.IamPolicySearchResult(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.SearchAllIamPoliciesResponse( results=[ @@ -4466,9 +4965,10 @@ def test_search_all_iam_policies_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.search_all_iam_policies(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_search_all_iam_policies_async_pager(): client = AssetServiceAsyncClient( @@ -4477,8 +4977,10 @@ async def test_search_all_iam_policies_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.search_all_iam_policies), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. 
call.side_effect = ( asset_service.SearchAllIamPoliciesResponse( @@ -4487,17 +4989,17 @@ async def test_search_all_iam_policies_async_pager(): assets.IamPolicySearchResult(), assets.IamPolicySearchResult(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.SearchAllIamPoliciesResponse( results=[], - next_page_token='def', + next_page_token="def", ), asset_service.SearchAllIamPoliciesResponse( results=[ assets.IamPolicySearchResult(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.SearchAllIamPoliciesResponse( results=[ @@ -4507,15 +5009,16 @@ async def test_search_all_iam_policies_async_pager(): ), RuntimeError, ) - async_pager = await client.search_all_iam_policies(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.search_all_iam_policies( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, assets.IamPolicySearchResult) - for i in responses) + assert all(isinstance(i, assets.IamPolicySearchResult) for i in responses) @pytest.mark.asyncio @@ -4526,8 +5029,10 @@ async def test_search_all_iam_policies_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.search_all_iam_policies), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. 
call.side_effect = ( asset_service.SearchAllIamPoliciesResponse( @@ -4536,17 +5041,17 @@ async def test_search_all_iam_policies_async_pages(): assets.IamPolicySearchResult(), assets.IamPolicySearchResult(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.SearchAllIamPoliciesResponse( results=[], - next_page_token='def', + next_page_token="def", ), asset_service.SearchAllIamPoliciesResponse( results=[ assets.IamPolicySearchResult(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.SearchAllIamPoliciesResponse( results=[ @@ -4559,18 +5064,22 @@ async def test_search_all_iam_policies_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.search_all_iam_policies(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeIamPolicyRequest, - dict, -]) -def test_analyze_iam_policy(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + asset_service.AnalyzeIamPolicyRequest, + dict, + ], +) +def test_analyze_iam_policy(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4582,8 +5091,8 @@ def test_analyze_iam_policy(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_iam_policy), - '__call__') as call: + type(client.transport.analyze_iam_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = asset_service.AnalyzeIamPolicyResponse( fully_explored=True, @@ -4606,28 +5115,31 @@ def test_analyze_iam_policy_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.AnalyzeIamPolicyRequest( - saved_analysis_query='saved_analysis_query_value', + saved_analysis_query="saved_analysis_query_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_iam_policy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.analyze_iam_policy), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.analyze_iam_policy(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeIamPolicyRequest( - saved_analysis_query='saved_analysis_query_value', + saved_analysis_query="saved_analysis_query_value", ) + def test_analyze_iam_policy_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -4642,12 +5154,18 @@ def test_analyze_iam_policy_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.analyze_iam_policy in client._transport._wrapped_methods + assert ( + client._transport.analyze_iam_policy in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.analyze_iam_policy] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.analyze_iam_policy] = ( + mock_rpc + ) request = {} client.analyze_iam_policy(request) @@ -4660,8 +5178,11 @@ def test_analyze_iam_policy_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_analyze_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_analyze_iam_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -4675,12 +5196,17 @@ async def test_analyze_iam_policy_async_use_cached_wrapped_rpc(transport: str = wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.analyze_iam_policy in client._client._transport._wrapped_methods + assert ( + client._client._transport.analyze_iam_policy + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.analyze_iam_policy] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.analyze_iam_policy + ] = mock_rpc request = {} await client.analyze_iam_policy(request) @@ -4694,8 +5220,11 @@ async def test_analyze_iam_policy_async_use_cached_wrapped_rpc(transport: str = assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_analyze_iam_policy_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeIamPolicyRequest): +async def test_analyze_iam_policy_async( + transport: str = "grpc_asyncio", request_type=asset_service.AnalyzeIamPolicyRequest +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), 
transport=transport, @@ -4707,12 +5236,14 @@ async def test_analyze_iam_policy_async(transport: str = 'grpc_asyncio', request # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_iam_policy), - '__call__') as call: + type(client.transport.analyze_iam_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeIamPolicyResponse( - fully_explored=True, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeIamPolicyResponse( + fully_explored=True, + ) + ) response = await client.analyze_iam_policy(request) # Establish that the underlying gRPC stub method was called. @@ -4730,6 +5261,7 @@ async def test_analyze_iam_policy_async(transport: str = 'grpc_asyncio', request async def test_analyze_iam_policy_async_from_dict(): await test_analyze_iam_policy_async(request_type=dict) + def test_analyze_iam_policy_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4739,12 +5271,12 @@ def test_analyze_iam_policy_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeIamPolicyRequest() - request.analysis_query.scope = 'scope_value' + request.analysis_query.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_iam_policy), - '__call__') as call: + type(client.transport.analyze_iam_policy), "__call__" + ) as call: call.return_value = asset_service.AnalyzeIamPolicyResponse() client.analyze_iam_policy(request) @@ -4756,9 +5288,9 @@ def test_analyze_iam_policy_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'analysis_query.scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "analysis_query.scope=scope_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -4771,13 +5303,15 @@ async def test_analyze_iam_policy_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeIamPolicyRequest() - request.analysis_query.scope = 'scope_value' + request.analysis_query.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeIamPolicyResponse()) + type(client.transport.analyze_iam_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeIamPolicyResponse() + ) await client.analyze_iam_policy(request) # Establish that the underlying gRPC stub method was called. @@ -4788,16 +5322,19 @@ async def test_analyze_iam_policy_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'analysis_query.scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "analysis_query.scope=scope_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeIamPolicyLongrunningRequest, - dict, -]) -def test_analyze_iam_policy_longrunning(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.AnalyzeIamPolicyLongrunningRequest, + dict, + ], +) +def test_analyze_iam_policy_longrunning(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4809,10 +5346,10 @@ def test_analyze_iam_policy_longrunning(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_iam_policy_longrunning), - '__call__') as call: + type(client.transport.analyze_iam_policy_longrunning), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.analyze_iam_policy_longrunning(request) # Establish that the underlying gRPC stub method was called. @@ -4830,28 +5367,31 @@ def test_analyze_iam_policy_longrunning_non_empty_request_with_auto_populated_fi # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
request = asset_service.AnalyzeIamPolicyLongrunningRequest( - saved_analysis_query='saved_analysis_query_value', + saved_analysis_query="saved_analysis_query_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_iam_policy_longrunning), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.analyze_iam_policy_longrunning), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.analyze_iam_policy_longrunning(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeIamPolicyLongrunningRequest( - saved_analysis_query='saved_analysis_query_value', + saved_analysis_query="saved_analysis_query_value", ) + def test_analyze_iam_policy_longrunning_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -4866,12 +5406,19 @@ def test_analyze_iam_policy_longrunning_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.analyze_iam_policy_longrunning in client._transport._wrapped_methods + assert ( + client._transport.analyze_iam_policy_longrunning + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.analyze_iam_policy_longrunning] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.analyze_iam_policy_longrunning + ] = mock_rpc request = {} client.analyze_iam_policy_longrunning(request) @@ -4889,8 +5436,11 @@ def test_analyze_iam_policy_longrunning_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_analyze_iam_policy_longrunning_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_analyze_iam_policy_longrunning_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -4904,12 +5454,17 @@ async def test_analyze_iam_policy_longrunning_async_use_cached_wrapped_rpc(trans wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.analyze_iam_policy_longrunning in client._client._transport._wrapped_methods + assert ( + client._client._transport.analyze_iam_policy_longrunning + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.analyze_iam_policy_longrunning] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.analyze_iam_policy_longrunning + ] = mock_rpc request = {} await client.analyze_iam_policy_longrunning(request) @@ -4928,8 +5483,12 @@ async def test_analyze_iam_policy_longrunning_async_use_cached_wrapped_rpc(trans assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_analyze_iam_policy_longrunning_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): +async def test_analyze_iam_policy_longrunning_async( + transport: str = 
"grpc_asyncio", + request_type=asset_service.AnalyzeIamPolicyLongrunningRequest, +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -4941,11 +5500,11 @@ async def test_analyze_iam_policy_longrunning_async(transport: str = 'grpc_async # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_iam_policy_longrunning), - '__call__') as call: + type(client.transport.analyze_iam_policy_longrunning), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.analyze_iam_policy_longrunning(request) @@ -4963,6 +5522,7 @@ async def test_analyze_iam_policy_longrunning_async(transport: str = 'grpc_async async def test_analyze_iam_policy_longrunning_async_from_dict(): await test_analyze_iam_policy_longrunning_async(request_type=dict) + def test_analyze_iam_policy_longrunning_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4972,13 +5532,13 @@ def test_analyze_iam_policy_longrunning_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeIamPolicyLongrunningRequest() - request.analysis_query.scope = 'scope_value' + request.analysis_query.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_iam_policy_longrunning), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + type(client.transport.analyze_iam_policy_longrunning), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.analyze_iam_policy_longrunning(request) # Establish that the underlying gRPC stub method was called. 
@@ -4989,9 +5549,9 @@ def test_analyze_iam_policy_longrunning_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'analysis_query.scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "analysis_query.scope=scope_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -5004,13 +5564,15 @@ async def test_analyze_iam_policy_longrunning_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeIamPolicyLongrunningRequest() - request.analysis_query.scope = 'scope_value' + request.analysis_query.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_iam_policy_longrunning), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + type(client.transport.analyze_iam_policy_longrunning), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.analyze_iam_policy_longrunning(request) # Establish that the underlying gRPC stub method was called. @@ -5021,16 +5583,19 @@ async def test_analyze_iam_policy_longrunning_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'analysis_query.scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "analysis_query.scope=scope_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeMoveRequest, - dict, -]) -def test_analyze_move(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.AnalyzeMoveRequest, + dict, + ], +) +def test_analyze_move(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5041,12 +5606,9 @@ def test_analyze_move(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_move), - '__call__') as call: + with mock.patch.object(type(client.transport.analyze_move), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = asset_service.AnalyzeMoveResponse( - ) + call.return_value = asset_service.AnalyzeMoveResponse() response = client.analyze_move(request) # Establish that the underlying gRPC stub method was called. @@ -5064,30 +5626,31 @@ def test_analyze_move_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.AnalyzeMoveRequest( - resource='resource_value', - destination_parent='destination_parent_value', + resource="resource_value", + destination_parent="destination_parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.analyze_move), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.analyze_move), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.analyze_move(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeMoveRequest( - resource='resource_value', - destination_parent='destination_parent_value', + resource="resource_value", + destination_parent="destination_parent_value", ) + def test_analyze_move_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5106,7 +5669,9 @@ def test_analyze_move_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.analyze_move] = mock_rpc request = {} client.analyze_move(request) @@ -5120,8 +5685,11 @@ def test_analyze_move_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_analyze_move_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_analyze_move_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -5135,12 +5703,17 @@ async def test_analyze_move_async_use_cached_wrapped_rpc(transport: str = "grpc_ wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.analyze_move in client._client._transport._wrapped_methods + assert ( + client._client._transport.analyze_move + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.analyze_move] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.analyze_move + ] = mock_rpc request = {} await client.analyze_move(request) @@ -5154,8 +5727,11 @@ async def test_analyze_move_async_use_cached_wrapped_rpc(transport: str = "grpc_ assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_analyze_move_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeMoveRequest): +async def test_analyze_move_async( + transport: str = "grpc_asyncio", request_type=asset_service.AnalyzeMoveRequest +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -5166,12 +5742,11 @@ async def test_analyze_move_async(transport: str = 
'grpc_asyncio', request_type= request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_move), - '__call__') as call: + with mock.patch.object(type(client.transport.analyze_move), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeMoveResponse( - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeMoveResponse() + ) response = await client.analyze_move(request) # Establish that the underlying gRPC stub method was called. @@ -5188,6 +5763,7 @@ async def test_analyze_move_async(transport: str = 'grpc_asyncio', request_type= async def test_analyze_move_async_from_dict(): await test_analyze_move_async(request_type=dict) + def test_analyze_move_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5197,12 +5773,10 @@ def test_analyze_move_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeMoveRequest() - request.resource = 'resource_value' + request.resource = "resource_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_move), - '__call__') as call: + with mock.patch.object(type(client.transport.analyze_move), "__call__") as call: call.return_value = asset_service.AnalyzeMoveResponse() client.analyze_move(request) @@ -5214,9 +5788,9 @@ def test_analyze_move_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -5229,13 +5803,13 @@ async def test_analyze_move_field_headers_async(): # a field header. Set these to a non-empty value. 
request = asset_service.AnalyzeMoveRequest() - request.resource = 'resource_value' + request.resource = "resource_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_move), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeMoveResponse()) + with mock.patch.object(type(client.transport.analyze_move), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeMoveResponse() + ) await client.analyze_move(request) # Establish that the underlying gRPC stub method was called. @@ -5246,16 +5820,19 @@ async def test_analyze_move_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - asset_service.QueryAssetsRequest, - dict, -]) -def test_query_assets(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.QueryAssetsRequest, + dict, + ], +) +def test_query_assets(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5266,12 +5843,10 @@ def test_query_assets(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.query_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.query_assets), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = asset_service.QueryAssetsResponse( - job_reference='job_reference_value', + job_reference="job_reference_value", done=True, ) response = client.query_assets(request) @@ -5284,7 +5859,7 @@ def test_query_assets(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, asset_service.QueryAssetsResponse) - assert response.job_reference == 'job_reference_value' + assert response.job_reference == "job_reference_value" assert response.done is True @@ -5293,34 +5868,35 @@ def test_query_assets_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.QueryAssetsRequest( - parent='parent_value', - statement='statement_value', - job_reference='job_reference_value', - page_token='page_token_value', + parent="parent_value", + statement="statement_value", + job_reference="job_reference_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.query_assets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.query_assets), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.query_assets(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.QueryAssetsRequest( - parent='parent_value', - statement='statement_value', - job_reference='job_reference_value', - page_token='page_token_value', + parent="parent_value", + statement="statement_value", + job_reference="job_reference_value", + page_token="page_token_value", ) + def test_query_assets_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5339,7 +5915,9 @@ def test_query_assets_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.query_assets] = mock_rpc request = {} client.query_assets(request) @@ -5353,8 +5931,11 @@ def test_query_assets_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_query_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_query_assets_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -5368,12 +5949,17 @@ async def test_query_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_ wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.query_assets in client._client._transport._wrapped_methods + assert ( + client._client._transport.query_assets + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with 
mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.query_assets] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.query_assets + ] = mock_rpc request = {} await client.query_assets(request) @@ -5387,8 +5973,11 @@ async def test_query_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_ assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_query_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.QueryAssetsRequest): +async def test_query_assets_async( + transport: str = "grpc_asyncio", request_type=asset_service.QueryAssetsRequest +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -5399,14 +5988,14 @@ async def test_query_assets_async(transport: str = 'grpc_asyncio', request_type= request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.query_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.query_assets), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.QueryAssetsResponse( - job_reference='job_reference_value', - done=True, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.QueryAssetsResponse( + job_reference="job_reference_value", + done=True, + ) + ) response = await client.query_assets(request) # Establish that the underlying gRPC stub method was called. @@ -5417,7 +6006,7 @@ async def test_query_assets_async(transport: str = 'grpc_asyncio', request_type= # Establish that the response is the type that we expect. 
assert isinstance(response, asset_service.QueryAssetsResponse) - assert response.job_reference == 'job_reference_value' + assert response.job_reference == "job_reference_value" assert response.done is True @@ -5425,6 +6014,7 @@ async def test_query_assets_async(transport: str = 'grpc_asyncio', request_type= async def test_query_assets_async_from_dict(): await test_query_assets_async(request_type=dict) + def test_query_assets_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5434,12 +6024,10 @@ def test_query_assets_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.QueryAssetsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.query_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.query_assets), "__call__") as call: call.return_value = asset_service.QueryAssetsResponse() client.query_assets(request) @@ -5451,9 +6039,9 @@ def test_query_assets_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -5466,13 +6054,13 @@ async def test_query_assets_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.QueryAssetsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.query_assets), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.QueryAssetsResponse()) + with mock.patch.object(type(client.transport.query_assets), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.QueryAssetsResponse() + ) await client.query_assets(request) # Establish that the underlying gRPC stub method was called. @@ -5483,16 +6071,19 @@ async def test_query_assets_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - asset_service.CreateSavedQueryRequest, - dict, -]) -def test_create_saved_query(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.CreateSavedQueryRequest, + dict, + ], +) +def test_create_saved_query(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5504,14 +6095,14 @@ def test_create_saved_query(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: + type(client.transport.create_saved_query), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", ) response = client.create_saved_query(request) @@ -5523,10 +6114,10 @@ def test_create_saved_query(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.creator == "creator_value" + assert response.last_updater == "last_updater_value" def test_create_saved_query_non_empty_request_with_auto_populated_field(): @@ -5534,30 +6125,33 @@ def test_create_saved_query_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.CreateSavedQueryRequest( - parent='parent_value', - saved_query_id='saved_query_id_value', + parent="parent_value", + saved_query_id="saved_query_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ type(client.transport.create_saved_query), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_saved_query(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.CreateSavedQueryRequest( - parent='parent_value', - saved_query_id='saved_query_id_value', + parent="parent_value", + saved_query_id="saved_query_id_value", ) + def test_create_saved_query_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5572,12 +6166,18 @@ def test_create_saved_query_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_saved_query in client._transport._wrapped_methods + assert ( + client._transport.create_saved_query in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_saved_query] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.create_saved_query] = ( + mock_rpc + ) request = {} client.create_saved_query(request) @@ -5590,8 +6190,11 @@ def test_create_saved_query_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_saved_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_create_saved_query_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -5605,12 +6208,17 @@ async def test_create_saved_query_async_use_cached_wrapped_rpc(transport: str = wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.create_saved_query in client._client._transport._wrapped_methods + assert ( + client._client._transport.create_saved_query + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_saved_query] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.create_saved_query + ] = mock_rpc request = {} await client.create_saved_query(request) @@ -5624,8 +6232,11 @@ async def test_create_saved_query_async_use_cached_wrapped_rpc(transport: str = assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.CreateSavedQueryRequest): +async def test_create_saved_query_async( + transport: str = "grpc_asyncio", request_type=asset_service.CreateSavedQueryRequest +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), 
transport=transport, @@ -5637,15 +6248,17 @@ async def test_create_saved_query_async(transport: str = 'grpc_asyncio', request # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: + type(client.transport.create_saved_query), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SavedQuery( + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", + ) + ) response = await client.create_saved_query(request) # Establish that the underlying gRPC stub method was called. @@ -5656,16 +6269,17 @@ async def test_create_saved_query_async(transport: str = 'grpc_asyncio', request # Establish that the response is the type that we expect. assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.creator == "creator_value" + assert response.last_updater == "last_updater_value" @pytest.mark.asyncio async def test_create_saved_query_async_from_dict(): await test_create_saved_query_async(request_type=dict) + def test_create_saved_query_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5675,12 +6289,12 @@ def test_create_saved_query_field_headers(): # a field header. Set these to a non-empty value. 
request = asset_service.CreateSavedQueryRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: + type(client.transport.create_saved_query), "__call__" + ) as call: call.return_value = asset_service.SavedQuery() client.create_saved_query(request) @@ -5692,9 +6306,9 @@ def test_create_saved_query_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -5707,13 +6321,15 @@ async def test_create_saved_query_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.CreateSavedQueryRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery()) + type(client.transport.create_saved_query), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SavedQuery() + ) await client.create_saved_query(request) # Establish that the underlying gRPC stub method was called. @@ -5724,9 +6340,9 @@ async def test_create_saved_query_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_create_saved_query_flattened(): @@ -5736,16 +6352,16 @@ def test_create_saved_query_flattened(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: + type(client.transport.create_saved_query), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.SavedQuery() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_saved_query( - parent='parent_value', - saved_query=asset_service.SavedQuery(name='name_value'), - saved_query_id='saved_query_id_value', + parent="parent_value", + saved_query=asset_service.SavedQuery(name="name_value"), + saved_query_id="saved_query_id_value", ) # Establish that the underlying call was made with the expected @@ -5753,13 +6369,13 @@ def test_create_saved_query_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].saved_query - mock_val = asset_service.SavedQuery(name='name_value') + mock_val = asset_service.SavedQuery(name="name_value") assert arg == mock_val arg = args[0].saved_query_id - mock_val = 'saved_query_id_value' + mock_val = "saved_query_id_value" assert arg == mock_val @@ -5773,11 +6389,12 @@ def test_create_saved_query_flattened_error(): with pytest.raises(ValueError): client.create_saved_query( asset_service.CreateSavedQueryRequest(), - parent='parent_value', - saved_query=asset_service.SavedQuery(name='name_value'), - saved_query_id='saved_query_id_value', + parent="parent_value", + saved_query=asset_service.SavedQuery(name="name_value"), + saved_query_id="saved_query_id_value", ) + @pytest.mark.asyncio async def test_create_saved_query_flattened_async(): client = AssetServiceAsyncClient( @@ -5786,18 +6403,20 @@ async def test_create_saved_query_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: + type(client.transport.create_saved_query), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.SavedQuery() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SavedQuery() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_saved_query( - parent='parent_value', - saved_query=asset_service.SavedQuery(name='name_value'), - saved_query_id='saved_query_id_value', + parent="parent_value", + saved_query=asset_service.SavedQuery(name="name_value"), + saved_query_id="saved_query_id_value", ) # Establish that the underlying call was made with the expected @@ -5805,15 +6424,16 @@ async def test_create_saved_query_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].saved_query - mock_val = asset_service.SavedQuery(name='name_value') + mock_val = asset_service.SavedQuery(name="name_value") assert arg == mock_val arg = args[0].saved_query_id - mock_val = 'saved_query_id_value' + mock_val = "saved_query_id_value" assert arg == mock_val + @pytest.mark.asyncio async def test_create_saved_query_flattened_error_async(): client = AssetServiceAsyncClient( @@ -5825,17 +6445,20 @@ async def test_create_saved_query_flattened_error_async(): with pytest.raises(ValueError): await client.create_saved_query( asset_service.CreateSavedQueryRequest(), - parent='parent_value', - saved_query=asset_service.SavedQuery(name='name_value'), - saved_query_id='saved_query_id_value', + parent="parent_value", + saved_query=asset_service.SavedQuery(name="name_value"), + saved_query_id="saved_query_id_value", ) 
-@pytest.mark.parametrize("request_type", [ - asset_service.GetSavedQueryRequest, - dict, -]) -def test_get_saved_query(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.GetSavedQueryRequest, + dict, + ], +) +def test_get_saved_query(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5846,15 +6469,13 @@ def test_get_saved_query(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: + with mock.patch.object(type(client.transport.get_saved_query), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", ) response = client.get_saved_query(request) @@ -5866,10 +6487,10 @@ def test_get_saved_query(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.creator == "creator_value" + assert response.last_updater == "last_updater_value" def test_get_saved_query_non_empty_request_with_auto_populated_field(): @@ -5877,28 +6498,29 @@ def test_get_saved_query_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.GetSavedQueryRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.get_saved_query), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.get_saved_query(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.GetSavedQueryRequest( - name='name_value', + name="name_value", ) + def test_get_saved_query_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5917,7 +6539,9 @@ def test_get_saved_query_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.get_saved_query] = mock_rpc request = {} client.get_saved_query(request) @@ -5931,8 +6555,11 @@ def test_get_saved_query_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_saved_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_get_saved_query_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -5946,12 +6573,17 @@ async def test_get_saved_query_async_use_cached_wrapped_rpc(transport: str = "gr wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_saved_query in client._client._transport._wrapped_methods + assert ( + client._client._transport.get_saved_query + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_saved_query] = mock_rpc + 
client._client._transport._wrapped_methods[ + client._client._transport.get_saved_query + ] = mock_rpc request = {} await client.get_saved_query(request) @@ -5965,8 +6597,11 @@ async def test_get_saved_query_async_use_cached_wrapped_rpc(transport: str = "gr assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.GetSavedQueryRequest): +async def test_get_saved_query_async( + transport: str = "grpc_asyncio", request_type=asset_service.GetSavedQueryRequest +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -5977,16 +6612,16 @@ async def test_get_saved_query_async(transport: str = 'grpc_asyncio', request_ty request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: + with mock.patch.object(type(client.transport.get_saved_query), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SavedQuery( + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", + ) + ) response = await client.get_saved_query(request) # Establish that the underlying gRPC stub method was called. @@ -5997,16 +6632,17 @@ async def test_get_saved_query_async(transport: str = 'grpc_asyncio', request_ty # Establish that the response is the type that we expect. 
assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.creator == "creator_value" + assert response.last_updater == "last_updater_value" @pytest.mark.asyncio async def test_get_saved_query_async_from_dict(): await test_get_saved_query_async(request_type=dict) + def test_get_saved_query_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6016,12 +6652,10 @@ def test_get_saved_query_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.GetSavedQueryRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: + with mock.patch.object(type(client.transport.get_saved_query), "__call__") as call: call.return_value = asset_service.SavedQuery() client.get_saved_query(request) @@ -6033,9 +6667,9 @@ def test_get_saved_query_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -6048,13 +6682,13 @@ async def test_get_saved_query_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.GetSavedQueryRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery()) + with mock.patch.object(type(client.transport.get_saved_query), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SavedQuery() + ) await client.get_saved_query(request) # Establish that the underlying gRPC stub method was called. @@ -6065,9 +6699,9 @@ async def test_get_saved_query_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_get_saved_query_flattened(): @@ -6076,15 +6710,13 @@ def test_get_saved_query_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: + with mock.patch.object(type(client.transport.get_saved_query), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = asset_service.SavedQuery() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.get_saved_query( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -6092,7 +6724,7 @@ def test_get_saved_query_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -6106,9 +6738,10 @@ def test_get_saved_query_flattened_error(): with pytest.raises(ValueError): client.get_saved_query( asset_service.GetSavedQueryRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_get_saved_query_flattened_async(): client = AssetServiceAsyncClient( @@ -6116,17 +6749,17 @@ async def test_get_saved_query_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: + with mock.patch.object(type(client.transport.get_saved_query), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = asset_service.SavedQuery() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SavedQuery() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.get_saved_query( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -6134,9 +6767,10 @@ async def test_get_saved_query_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_get_saved_query_flattened_error_async(): client = AssetServiceAsyncClient( @@ -6148,15 +6782,18 @@ async def test_get_saved_query_flattened_error_async(): with pytest.raises(ValueError): await client.get_saved_query( asset_service.GetSavedQueryRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - asset_service.ListSavedQueriesRequest, - dict, -]) -def test_list_saved_queries(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.ListSavedQueriesRequest, + dict, + ], +) +def test_list_saved_queries(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6168,11 +6805,11 @@ def test_list_saved_queries(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: + type(client.transport.list_saved_queries), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.ListSavedQueriesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.list_saved_queries(request) @@ -6184,7 +6821,7 @@ def test_list_saved_queries(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListSavedQueriesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_list_saved_queries_non_empty_request_with_auto_populated_field(): @@ -6192,32 +6829,35 @@ def test_list_saved_queries_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.ListSavedQueriesRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', + parent="parent_value", + filter="filter_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.list_saved_queries), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_saved_queries(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.ListSavedQueriesRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', + parent="parent_value", + filter="filter_value", + page_token="page_token_value", ) + def test_list_saved_queries_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -6232,12 +6872,18 @@ def test_list_saved_queries_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_saved_queries in client._transport._wrapped_methods + assert ( + client._transport.list_saved_queries in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_saved_queries] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.list_saved_queries] = ( + mock_rpc + ) request = {} client.list_saved_queries(request) @@ -6250,8 +6896,11 @@ def test_list_saved_queries_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_saved_queries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_saved_queries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -6265,12 +6914,17 @@ async def test_list_saved_queries_async_use_cached_wrapped_rpc(transport: str = wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_saved_queries in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_saved_queries + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_saved_queries] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_saved_queries + ] = mock_rpc request = {} await client.list_saved_queries(request) @@ -6284,8 +6938,11 @@ async def test_list_saved_queries_async_use_cached_wrapped_rpc(transport: str = assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_saved_queries_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListSavedQueriesRequest): +async def test_list_saved_queries_async( + transport: str = "grpc_asyncio", request_type=asset_service.ListSavedQueriesRequest +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), 
transport=transport, @@ -6297,12 +6954,14 @@ async def test_list_saved_queries_async(transport: str = 'grpc_asyncio', request # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: + type(client.transport.list_saved_queries), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListSavedQueriesResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.ListSavedQueriesResponse( + next_page_token="next_page_token_value", + ) + ) response = await client.list_saved_queries(request) # Establish that the underlying gRPC stub method was called. @@ -6313,13 +6972,14 @@ async def test_list_saved_queries_async(transport: str = 'grpc_asyncio', request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSavedQueriesAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_list_saved_queries_async_from_dict(): await test_list_saved_queries_async(request_type=dict) + def test_list_saved_queries_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6329,12 +6989,12 @@ def test_list_saved_queries_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.ListSavedQueriesRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: + type(client.transport.list_saved_queries), "__call__" + ) as call: call.return_value = asset_service.ListSavedQueriesResponse() client.list_saved_queries(request) @@ -6346,9 +7006,9 @@ def test_list_saved_queries_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -6361,13 +7021,15 @@ async def test_list_saved_queries_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.ListSavedQueriesRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListSavedQueriesResponse()) + type(client.transport.list_saved_queries), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.ListSavedQueriesResponse() + ) await client.list_saved_queries(request) # Establish that the underlying gRPC stub method was called. @@ -6378,9 +7040,9 @@ async def test_list_saved_queries_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_list_saved_queries_flattened(): @@ -6390,14 +7052,14 @@ def test_list_saved_queries_flattened(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: + type(client.transport.list_saved_queries), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.ListSavedQueriesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_saved_queries( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -6405,7 +7067,7 @@ def test_list_saved_queries_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -6419,9 +7081,10 @@ def test_list_saved_queries_flattened_error(): with pytest.raises(ValueError): client.list_saved_queries( asset_service.ListSavedQueriesRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test_list_saved_queries_flattened_async(): client = AssetServiceAsyncClient( @@ -6430,16 +7093,18 @@ async def test_list_saved_queries_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: + type(client.transport.list_saved_queries), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.ListSavedQueriesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListSavedQueriesResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.ListSavedQueriesResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.list_saved_queries( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -6447,9 +7112,10 @@ async def test_list_saved_queries_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test_list_saved_queries_flattened_error_async(): client = AssetServiceAsyncClient( @@ -6461,7 +7127,7 @@ async def test_list_saved_queries_flattened_error_async(): with pytest.raises(ValueError): await client.list_saved_queries( asset_service.ListSavedQueriesRequest(), - parent='parent_value', + parent="parent_value", ) @@ -6473,8 +7139,8 @@ def test_list_saved_queries_pager(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: + type(client.transport.list_saved_queries), "__call__" + ) as call: # Set the response to a series of pages. 
call.side_effect = ( asset_service.ListSavedQueriesResponse( @@ -6483,17 +7149,17 @@ def test_list_saved_queries_pager(transport_name: str = "grpc"): asset_service.SavedQuery(), asset_service.SavedQuery(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.ListSavedQueriesResponse( saved_queries=[], - next_page_token='def', + next_page_token="def", ), asset_service.ListSavedQueriesResponse( saved_queries=[ asset_service.SavedQuery(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.ListSavedQueriesResponse( saved_queries=[ @@ -6508,9 +7174,7 @@ def test_list_saved_queries_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_saved_queries(request={}, retry=retry, timeout=timeout) @@ -6520,8 +7184,9 @@ def test_list_saved_queries_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, asset_service.SavedQuery) - for i in results) + assert all(isinstance(i, asset_service.SavedQuery) for i in results) + + def test_list_saved_queries_pages(transport_name: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6530,8 +7195,8 @@ def test_list_saved_queries_pages(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: + type(client.transport.list_saved_queries), "__call__" + ) as call: # Set the response to a series of pages. 
call.side_effect = ( asset_service.ListSavedQueriesResponse( @@ -6540,17 +7205,17 @@ def test_list_saved_queries_pages(transport_name: str = "grpc"): asset_service.SavedQuery(), asset_service.SavedQuery(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.ListSavedQueriesResponse( saved_queries=[], - next_page_token='def', + next_page_token="def", ), asset_service.ListSavedQueriesResponse( saved_queries=[ asset_service.SavedQuery(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.ListSavedQueriesResponse( saved_queries=[ @@ -6561,9 +7226,10 @@ def test_list_saved_queries_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.list_saved_queries(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_list_saved_queries_async_pager(): client = AssetServiceAsyncClient( @@ -6572,8 +7238,10 @@ async def test_list_saved_queries_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_saved_queries), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. 
call.side_effect = ( asset_service.ListSavedQueriesResponse( @@ -6582,17 +7250,17 @@ async def test_list_saved_queries_async_pager(): asset_service.SavedQuery(), asset_service.SavedQuery(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.ListSavedQueriesResponse( saved_queries=[], - next_page_token='def', + next_page_token="def", ), asset_service.ListSavedQueriesResponse( saved_queries=[ asset_service.SavedQuery(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.ListSavedQueriesResponse( saved_queries=[ @@ -6602,15 +7270,16 @@ async def test_list_saved_queries_async_pager(): ), RuntimeError, ) - async_pager = await client.list_saved_queries(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.list_saved_queries( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, asset_service.SavedQuery) - for i in responses) + assert all(isinstance(i, asset_service.SavedQuery) for i in responses) @pytest.mark.asyncio @@ -6621,8 +7290,10 @@ async def test_list_saved_queries_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_saved_queries), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. 
call.side_effect = ( asset_service.ListSavedQueriesResponse( @@ -6631,17 +7302,17 @@ async def test_list_saved_queries_async_pages(): asset_service.SavedQuery(), asset_service.SavedQuery(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.ListSavedQueriesResponse( saved_queries=[], - next_page_token='def', + next_page_token="def", ), asset_service.ListSavedQueriesResponse( saved_queries=[ asset_service.SavedQuery(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.ListSavedQueriesResponse( saved_queries=[ @@ -6654,18 +7325,22 @@ async def test_list_saved_queries_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.list_saved_queries(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - asset_service.UpdateSavedQueryRequest, - dict, -]) -def test_update_saved_query(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + asset_service.UpdateSavedQueryRequest, + dict, + ], +) +def test_update_saved_query(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6677,14 +7352,14 @@ def test_update_saved_query(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_saved_query), - '__call__') as call: + type(client.transport.update_saved_query), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", ) response = client.update_saved_query(request) @@ -6696,10 +7371,10 @@ def test_update_saved_query(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.creator == "creator_value" + assert response.last_updater == "last_updater_value" def test_update_saved_query_non_empty_request_with_auto_populated_field(): @@ -6707,25 +7382,26 @@ def test_update_saved_query_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = asset_service.UpdateSavedQueryRequest( - ) + request = asset_service.UpdateSavedQueryRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_saved_query), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ type(client.transport.update_saved_query), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_saved_query(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.UpdateSavedQueryRequest( - ) + assert args[0] == asset_service.UpdateSavedQueryRequest() + def test_update_saved_query_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6741,12 +7417,18 @@ def test_update_saved_query_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_saved_query in client._transport._wrapped_methods + assert ( + client._transport.update_saved_query in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_saved_query] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.update_saved_query] = ( + mock_rpc + ) request = {} client.update_saved_query(request) @@ -6759,8 +7441,11 @@ def test_update_saved_query_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_saved_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_update_saved_query_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -6774,12 +7459,17 @@ async def test_update_saved_query_async_use_cached_wrapped_rpc(transport: str = wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.update_saved_query in client._client._transport._wrapped_methods + assert ( + client._client._transport.update_saved_query + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_saved_query] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.update_saved_query + ] = mock_rpc request = {} await client.update_saved_query(request) @@ -6793,8 +7483,11 @@ async def test_update_saved_query_async_use_cached_wrapped_rpc(transport: str = assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.UpdateSavedQueryRequest): +async def test_update_saved_query_async( + transport: str = "grpc_asyncio", request_type=asset_service.UpdateSavedQueryRequest +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), 
transport=transport, @@ -6806,15 +7499,17 @@ async def test_update_saved_query_async(transport: str = 'grpc_asyncio', request # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_saved_query), - '__call__') as call: + type(client.transport.update_saved_query), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SavedQuery( + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", + ) + ) response = await client.update_saved_query(request) # Establish that the underlying gRPC stub method was called. @@ -6825,16 +7520,17 @@ async def test_update_saved_query_async(transport: str = 'grpc_asyncio', request # Establish that the response is the type that we expect. assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.creator == "creator_value" + assert response.last_updater == "last_updater_value" @pytest.mark.asyncio async def test_update_saved_query_async_from_dict(): await test_update_saved_query_async(request_type=dict) + def test_update_saved_query_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6844,12 +7540,12 @@ def test_update_saved_query_field_headers(): # a field header. Set these to a non-empty value. 
request = asset_service.UpdateSavedQueryRequest() - request.saved_query.name = 'name_value' + request.saved_query.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_saved_query), - '__call__') as call: + type(client.transport.update_saved_query), "__call__" + ) as call: call.return_value = asset_service.SavedQuery() client.update_saved_query(request) @@ -6861,9 +7557,9 @@ def test_update_saved_query_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'saved_query.name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "saved_query.name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -6876,13 +7572,15 @@ async def test_update_saved_query_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.UpdateSavedQueryRequest() - request.saved_query.name = 'name_value' + request.saved_query.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_saved_query), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery()) + type(client.transport.update_saved_query), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SavedQuery() + ) await client.update_saved_query(request) # Establish that the underlying gRPC stub method was called. @@ -6893,9 +7591,9 @@ async def test_update_saved_query_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'saved_query.name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "saved_query.name=name_value", + ) in kw["metadata"] def test_update_saved_query_flattened(): @@ -6905,15 +7603,15 @@ def test_update_saved_query_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_saved_query), - '__call__') as call: + type(client.transport.update_saved_query), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.SavedQuery() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_saved_query( - saved_query=asset_service.SavedQuery(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + saved_query=asset_service.SavedQuery(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -6921,10 +7619,10 @@ def test_update_saved_query_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].saved_query - mock_val = asset_service.SavedQuery(name='name_value') + mock_val = asset_service.SavedQuery(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @@ -6938,10 +7636,11 @@ def test_update_saved_query_flattened_error(): with pytest.raises(ValueError): client.update_saved_query( asset_service.UpdateSavedQueryRequest(), - saved_query=asset_service.SavedQuery(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + saved_query=asset_service.SavedQuery(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + @pytest.mark.asyncio async def 
test_update_saved_query_flattened_async(): client = AssetServiceAsyncClient( @@ -6950,17 +7649,19 @@ async def test_update_saved_query_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_saved_query), - '__call__') as call: + type(client.transport.update_saved_query), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.SavedQuery() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SavedQuery() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_saved_query( - saved_query=asset_service.SavedQuery(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + saved_query=asset_service.SavedQuery(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -6968,12 +7669,13 @@ async def test_update_saved_query_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].saved_query - mock_val = asset_service.SavedQuery(name='name_value') + mock_val = asset_service.SavedQuery(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val + @pytest.mark.asyncio async def test_update_saved_query_flattened_error_async(): client = AssetServiceAsyncClient( @@ -6985,16 +7687,19 @@ async def test_update_saved_query_flattened_error_async(): with pytest.raises(ValueError): await client.update_saved_query( asset_service.UpdateSavedQueryRequest(), - saved_query=asset_service.SavedQuery(name='name_value'), - 
update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + saved_query=asset_service.SavedQuery(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -@pytest.mark.parametrize("request_type", [ - asset_service.DeleteSavedQueryRequest, - dict, -]) -def test_delete_saved_query(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.DeleteSavedQueryRequest, + dict, + ], +) +def test_delete_saved_query(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7006,8 +7711,8 @@ def test_delete_saved_query(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: + type(client.transport.delete_saved_query), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = None response = client.delete_saved_query(request) @@ -7027,28 +7732,31 @@ def test_delete_saved_query_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.DeleteSavedQueryRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ type(client.transport.delete_saved_query), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_saved_query(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.DeleteSavedQueryRequest( - name='name_value', + name="name_value", ) + def test_delete_saved_query_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7063,12 +7771,18 @@ def test_delete_saved_query_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_saved_query in client._transport._wrapped_methods + assert ( + client._transport.delete_saved_query in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_saved_query] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.delete_saved_query] = ( + mock_rpc + ) request = {} client.delete_saved_query(request) @@ -7081,8 +7795,11 @@ def test_delete_saved_query_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_saved_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_delete_saved_query_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -7096,12 +7813,17 @@ async def test_delete_saved_query_async_use_cached_wrapped_rpc(transport: str = wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.delete_saved_query in client._client._transport._wrapped_methods + assert ( + client._client._transport.delete_saved_query + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_saved_query] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.delete_saved_query + ] = mock_rpc request = {} await client.delete_saved_query(request) @@ -7115,8 +7837,11 @@ async def test_delete_saved_query_async_use_cached_wrapped_rpc(transport: str = assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.DeleteSavedQueryRequest): +async def test_delete_saved_query_async( + transport: str = "grpc_asyncio", request_type=asset_service.DeleteSavedQueryRequest +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), 
transport=transport, @@ -7128,8 +7853,8 @@ async def test_delete_saved_query_async(transport: str = 'grpc_asyncio', request # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: + type(client.transport.delete_saved_query), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.delete_saved_query(request) @@ -7148,6 +7873,7 @@ async def test_delete_saved_query_async(transport: str = 'grpc_asyncio', request async def test_delete_saved_query_async_from_dict(): await test_delete_saved_query_async(request_type=dict) + def test_delete_saved_query_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7157,12 +7883,12 @@ def test_delete_saved_query_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.DeleteSavedQueryRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: + type(client.transport.delete_saved_query), "__call__" + ) as call: call.return_value = None client.delete_saved_query(request) @@ -7174,9 +7900,9 @@ def test_delete_saved_query_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -7189,12 +7915,12 @@ async def test_delete_saved_query_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.DeleteSavedQueryRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: + type(client.transport.delete_saved_query), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_saved_query(request) @@ -7206,9 +7932,9 @@ async def test_delete_saved_query_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_delete_saved_query_flattened(): @@ -7218,14 +7944,14 @@ def test_delete_saved_query_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: + type(client.transport.delete_saved_query), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_saved_query( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -7233,7 +7959,7 @@ def test_delete_saved_query_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -7247,9 +7973,10 @@ def test_delete_saved_query_flattened_error(): with pytest.raises(ValueError): client.delete_saved_query( asset_service.DeleteSavedQueryRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_delete_saved_query_flattened_async(): client = AssetServiceAsyncClient( @@ -7258,8 +7985,8 @@ async def test_delete_saved_query_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: + type(client.transport.delete_saved_query), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -7267,7 +7994,7 @@ async def test_delete_saved_query_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.delete_saved_query( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -7275,9 +8002,10 @@ async def test_delete_saved_query_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_delete_saved_query_flattened_error_async(): client = AssetServiceAsyncClient( @@ -7289,15 +8017,18 @@ async def test_delete_saved_query_flattened_error_async(): with pytest.raises(ValueError): await client.delete_saved_query( asset_service.DeleteSavedQueryRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - asset_service.BatchGetEffectiveIamPoliciesRequest, - dict, -]) -def test_batch_get_effective_iam_policies(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.BatchGetEffectiveIamPoliciesRequest, + dict, + ], +) +def test_batch_get_effective_iam_policies(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7309,11 +8040,10 @@ def test_batch_get_effective_iam_policies(request_type, transport: str = 'grpc') # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.batch_get_effective_iam_policies), - '__call__') as call: + type(client.transport.batch_get_effective_iam_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse( - ) + call.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse() response = client.batch_get_effective_iam_policies(request) # Establish that the underlying gRPC stub method was called. @@ -7331,28 +8061,31 @@ def test_batch_get_effective_iam_policies_non_empty_request_with_auto_populated_ # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.BatchGetEffectiveIamPoliciesRequest( - scope='scope_value', + scope="scope_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_get_effective_iam_policies), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.batch_get_effective_iam_policies), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.batch_get_effective_iam_policies(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.BatchGetEffectiveIamPoliciesRequest( - scope='scope_value', + scope="scope_value", ) + def test_batch_get_effective_iam_policies_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7367,12 +8100,19 @@ def test_batch_get_effective_iam_policies_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.batch_get_effective_iam_policies in client._transport._wrapped_methods + assert ( + client._transport.batch_get_effective_iam_policies + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.batch_get_effective_iam_policies] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.batch_get_effective_iam_policies + ] = mock_rpc request = {} client.batch_get_effective_iam_policies(request) @@ -7385,8 +8125,11 @@ def test_batch_get_effective_iam_policies_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_batch_get_effective_iam_policies_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_batch_get_effective_iam_policies_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -7400,12 +8143,17 @@ async def test_batch_get_effective_iam_policies_async_use_cached_wrapped_rpc(tra wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.batch_get_effective_iam_policies in client._client._transport._wrapped_methods + assert ( + client._client._transport.batch_get_effective_iam_policies + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.batch_get_effective_iam_policies] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.batch_get_effective_iam_policies + ] = mock_rpc request = {} await client.batch_get_effective_iam_policies(request) @@ -7419,8 +8167,12 @@ async def test_batch_get_effective_iam_policies_async_use_cached_wrapped_rpc(tra assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_batch_get_effective_iam_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.BatchGetEffectiveIamPoliciesRequest): +async def test_batch_get_effective_iam_policies_async( 
+ transport: str = "grpc_asyncio", + request_type=asset_service.BatchGetEffectiveIamPoliciesRequest, +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -7432,11 +8184,12 @@ async def test_batch_get_effective_iam_policies_async(transport: str = 'grpc_asy # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_get_effective_iam_policies), - '__call__') as call: + type(client.transport.batch_get_effective_iam_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetEffectiveIamPoliciesResponse( - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.BatchGetEffectiveIamPoliciesResponse() + ) response = await client.batch_get_effective_iam_policies(request) # Establish that the underlying gRPC stub method was called. @@ -7453,6 +8206,7 @@ async def test_batch_get_effective_iam_policies_async(transport: str = 'grpc_asy async def test_batch_get_effective_iam_policies_async_from_dict(): await test_batch_get_effective_iam_policies_async(request_type=dict) + def test_batch_get_effective_iam_policies_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7462,12 +8216,12 @@ def test_batch_get_effective_iam_policies_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.BatchGetEffectiveIamPoliciesRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.batch_get_effective_iam_policies), - '__call__') as call: + type(client.transport.batch_get_effective_iam_policies), "__call__" + ) as call: call.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse() client.batch_get_effective_iam_policies(request) @@ -7479,9 +8233,9 @@ def test_batch_get_effective_iam_policies_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "scope=scope_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -7494,13 +8248,15 @@ async def test_batch_get_effective_iam_policies_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.BatchGetEffectiveIamPoliciesRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.batch_get_effective_iam_policies), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetEffectiveIamPoliciesResponse()) + type(client.transport.batch_get_effective_iam_policies), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.BatchGetEffectiveIamPoliciesResponse() + ) await client.batch_get_effective_iam_policies(request) # Establish that the underlying gRPC stub method was called. @@ -7511,16 +8267,19 @@ async def test_batch_get_effective_iam_policies_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "scope=scope_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeOrgPoliciesRequest, - dict, -]) -def test_analyze_org_policies(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + asset_service.AnalyzeOrgPoliciesRequest, + dict, + ], +) +def test_analyze_org_policies(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7532,11 +8291,11 @@ def test_analyze_org_policies(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: + type(client.transport.analyze_org_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.AnalyzeOrgPoliciesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.analyze_org_policies(request) @@ -7548,7 +8307,7 @@ def test_analyze_org_policies(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.AnalyzeOrgPoliciesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_analyze_org_policies_non_empty_request_with_auto_populated_field(): @@ -7556,34 +8315,37 @@ def test_analyze_org_policies_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. 
client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.AnalyzeOrgPoliciesRequest( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - page_token='page_token_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.analyze_org_policies), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.analyze_org_policies(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeOrgPoliciesRequest( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - page_token='page_token_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", + page_token="page_token_value", ) + def test_analyze_org_policies_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7598,12 +8360,18 @@ def test_analyze_org_policies_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.analyze_org_policies in client._transport._wrapped_methods + assert ( + client._transport.analyze_org_policies in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.analyze_org_policies] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.analyze_org_policies] = ( + mock_rpc + ) request = {} client.analyze_org_policies(request) @@ -7616,8 +8384,11 @@ def test_analyze_org_policies_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_analyze_org_policies_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_analyze_org_policies_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -7631,12 +8402,17 @@ async def test_analyze_org_policies_async_use_cached_wrapped_rpc(transport: str wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.analyze_org_policies in client._client._transport._wrapped_methods + assert ( + client._client._transport.analyze_org_policies + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.analyze_org_policies] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.analyze_org_policies + ] = mock_rpc request = {} await client.analyze_org_policies(request) @@ -7650,8 +8426,12 @@ async def test_analyze_org_policies_async_use_cached_wrapped_rpc(transport: str assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_analyze_org_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPoliciesRequest): +async def test_analyze_org_policies_async( + transport: str = "grpc_asyncio", + request_type=asset_service.AnalyzeOrgPoliciesRequest, +): client = AssetServiceAsyncClient( 
credentials=async_anonymous_credentials(), transport=transport, @@ -7663,12 +8443,14 @@ async def test_analyze_org_policies_async(transport: str = 'grpc_asyncio', reque # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: + type(client.transport.analyze_org_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPoliciesResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeOrgPoliciesResponse( + next_page_token="next_page_token_value", + ) + ) response = await client.analyze_org_policies(request) # Establish that the underlying gRPC stub method was called. @@ -7679,13 +8461,14 @@ async def test_analyze_org_policies_async(transport: str = 'grpc_asyncio', reque # Establish that the response is the type that we expect. assert isinstance(response, pagers.AnalyzeOrgPoliciesAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_analyze_org_policies_async_from_dict(): await test_analyze_org_policies_async(request_type=dict) + def test_analyze_org_policies_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7695,12 +8478,12 @@ def test_analyze_org_policies_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeOrgPoliciesRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: + type(client.transport.analyze_org_policies), "__call__" + ) as call: call.return_value = asset_service.AnalyzeOrgPoliciesResponse() client.analyze_org_policies(request) @@ -7712,9 +8495,9 @@ def test_analyze_org_policies_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "scope=scope_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -7727,13 +8510,15 @@ async def test_analyze_org_policies_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeOrgPoliciesRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPoliciesResponse()) + type(client.transport.analyze_org_policies), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeOrgPoliciesResponse() + ) await client.analyze_org_policies(request) # Establish that the underlying gRPC stub method was called. @@ -7744,9 +8529,9 @@ async def test_analyze_org_policies_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "scope=scope_value", + ) in kw["metadata"] def test_analyze_org_policies_flattened(): @@ -7756,16 +8541,16 @@ def test_analyze_org_policies_flattened(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: + type(client.transport.analyze_org_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.AnalyzeOrgPoliciesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.analyze_org_policies( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) # Establish that the underlying call was made with the expected @@ -7773,13 +8558,13 @@ def test_analyze_org_policies_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].scope - mock_val = 'scope_value' + mock_val = "scope_value" assert arg == mock_val arg = args[0].constraint - mock_val = 'constraint_value' + mock_val = "constraint_value" assert arg == mock_val arg = args[0].filter - mock_val = 'filter_value' + mock_val = "filter_value" assert arg == mock_val @@ -7793,11 +8578,12 @@ def test_analyze_org_policies_flattened_error(): with pytest.raises(ValueError): client.analyze_org_policies( asset_service.AnalyzeOrgPoliciesRequest(), - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) + @pytest.mark.asyncio async def test_analyze_org_policies_flattened_async(): client = AssetServiceAsyncClient( @@ -7806,18 +8592,20 @@ async def test_analyze_org_policies_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: + type(client.transport.analyze_org_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = asset_service.AnalyzeOrgPoliciesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPoliciesResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeOrgPoliciesResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.analyze_org_policies( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) # Establish that the underlying call was made with the expected @@ -7825,15 +8613,16 @@ async def test_analyze_org_policies_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].scope - mock_val = 'scope_value' + mock_val = "scope_value" assert arg == mock_val arg = args[0].constraint - mock_val = 'constraint_value' + mock_val = "constraint_value" assert arg == mock_val arg = args[0].filter - mock_val = 'filter_value' + mock_val = "filter_value" assert arg == mock_val + @pytest.mark.asyncio async def test_analyze_org_policies_flattened_error_async(): client = AssetServiceAsyncClient( @@ -7845,9 +8634,9 @@ async def test_analyze_org_policies_flattened_error_async(): with pytest.raises(ValueError): await client.analyze_org_policies( asset_service.AnalyzeOrgPoliciesRequest(), - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) @@ -7859,8 +8648,8 @@ def test_analyze_org_policies_pager(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: + type(client.transport.analyze_org_policies), "__call__" + ) as call: # Set the response to a series of pages. 
call.side_effect = ( asset_service.AnalyzeOrgPoliciesResponse( @@ -7869,17 +8658,17 @@ def test_analyze_org_policies_pager(transport_name: str = "grpc"): asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.AnalyzeOrgPoliciesResponse( org_policy_results=[], - next_page_token='def', + next_page_token="def", ), asset_service.AnalyzeOrgPoliciesResponse( org_policy_results=[ asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.AnalyzeOrgPoliciesResponse( org_policy_results=[ @@ -7894,9 +8683,7 @@ def test_analyze_org_policies_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('scope', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", ""),)), ) pager = client.analyze_org_policies(request={}, retry=retry, timeout=timeout) @@ -7906,8 +8693,12 @@ def test_analyze_org_policies_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult) - for i in results) + assert all( + isinstance(i, asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult) + for i in results + ) + + def test_analyze_org_policies_pages(transport_name: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7916,8 +8707,8 @@ def test_analyze_org_policies_pages(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: + type(client.transport.analyze_org_policies), "__call__" + ) as call: # Set the response to a series of pages. 
call.side_effect = ( asset_service.AnalyzeOrgPoliciesResponse( @@ -7926,17 +8717,17 @@ def test_analyze_org_policies_pages(transport_name: str = "grpc"): asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.AnalyzeOrgPoliciesResponse( org_policy_results=[], - next_page_token='def', + next_page_token="def", ), asset_service.AnalyzeOrgPoliciesResponse( org_policy_results=[ asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.AnalyzeOrgPoliciesResponse( org_policy_results=[ @@ -7947,9 +8738,10 @@ def test_analyze_org_policies_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.analyze_org_policies(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_analyze_org_policies_async_pager(): client = AssetServiceAsyncClient( @@ -7958,8 +8750,10 @@ async def test_analyze_org_policies_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.analyze_org_policies), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. 
call.side_effect = ( asset_service.AnalyzeOrgPoliciesResponse( @@ -7968,17 +8762,17 @@ async def test_analyze_org_policies_async_pager(): asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.AnalyzeOrgPoliciesResponse( org_policy_results=[], - next_page_token='def', + next_page_token="def", ), asset_service.AnalyzeOrgPoliciesResponse( org_policy_results=[ asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.AnalyzeOrgPoliciesResponse( org_policy_results=[ @@ -7988,15 +8782,19 @@ async def test_analyze_org_policies_async_pager(): ), RuntimeError, ) - async_pager = await client.analyze_org_policies(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.analyze_org_policies( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult) - for i in responses) + assert all( + isinstance(i, asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult) + for i in responses + ) @pytest.mark.asyncio @@ -8007,8 +8805,10 @@ async def test_analyze_org_policies_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.analyze_org_policies), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. 
call.side_effect = ( asset_service.AnalyzeOrgPoliciesResponse( @@ -8017,17 +8817,17 @@ async def test_analyze_org_policies_async_pages(): asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.AnalyzeOrgPoliciesResponse( org_policy_results=[], - next_page_token='def', + next_page_token="def", ), asset_service.AnalyzeOrgPoliciesResponse( org_policy_results=[ asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.AnalyzeOrgPoliciesResponse( org_policy_results=[ @@ -8040,18 +8840,22 @@ async def test_analyze_org_policies_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.analyze_org_policies(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeOrgPolicyGovernedContainersRequest, - dict, -]) -def test_analyze_org_policy_governed_containers(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + asset_service.AnalyzeOrgPolicyGovernedContainersRequest, + dict, + ], +) +def test_analyze_org_policy_governed_containers(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8063,11 +8867,11 @@ def test_analyze_org_policy_governed_containers(request_type, transport: str = ' # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_containers), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.analyze_org_policy_governed_containers(request) @@ -8079,7 +8883,7 @@ def test_analyze_org_policy_governed_containers(request_type, transport: str = ' # Establish that the response is the type that we expect. assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedContainersPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_analyze_org_policy_governed_containers_non_empty_request_with_auto_populated_field(): @@ -8087,34 +8891,37 @@ def test_analyze_org_policy_governed_containers_non_empty_request_with_auto_popu # automatically populated, according to AIP-4235, with non-empty requests. client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - page_token='page_token_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ type(client.transport.analyze_org_policy_governed_containers), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.analyze_org_policy_governed_containers(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeOrgPolicyGovernedContainersRequest( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - page_token='page_token_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", + page_token="page_token_value", ) + def test_analyze_org_policy_governed_containers_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -8129,12 +8936,19 @@ def test_analyze_org_policy_governed_containers_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.analyze_org_policy_governed_containers in client._transport._wrapped_methods + assert ( + client._transport.analyze_org_policy_governed_containers + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.analyze_org_policy_governed_containers] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.analyze_org_policy_governed_containers + ] = mock_rpc request = {} client.analyze_org_policy_governed_containers(request) @@ -8147,8 +8961,11 @@ def test_analyze_org_policy_governed_containers_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_analyze_org_policy_governed_containers_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_analyze_org_policy_governed_containers_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -8162,12 +8979,17 @@ async def test_analyze_org_policy_governed_containers_async_use_cached_wrapped_r wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.analyze_org_policy_governed_containers in client._client._transport._wrapped_methods + assert ( + client._client._transport.analyze_org_policy_governed_containers + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.analyze_org_policy_governed_containers] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.analyze_org_policy_governed_containers + ] = mock_rpc request = {} await client.analyze_org_policy_governed_containers(request) @@ -8181,8 +9003,12 @@ async def test_analyze_org_policy_governed_containers_async_use_cached_wrapped_r assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_analyze_org_policy_governed_containers_async(transport: str = 'grpc_asyncio', 
request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest): +async def test_analyze_org_policy_governed_containers_async( + transport: str = "grpc_asyncio", + request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest, +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -8194,12 +9020,14 @@ async def test_analyze_org_policy_governed_containers_async(transport: str = 'gr # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_containers), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + next_page_token="next_page_token_value", + ) + ) response = await client.analyze_org_policy_governed_containers(request) # Establish that the underlying gRPC stub method was called. @@ -8210,13 +9038,14 @@ async def test_analyze_org_policy_governed_containers_async(transport: str = 'gr # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_async_from_dict(): await test_analyze_org_policy_governed_containers_async(request_type=dict) + def test_analyze_org_policy_governed_containers_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8226,12 +9055,12 @@ def test_analyze_org_policy_governed_containers_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_containers), "__call__" + ) as call: call.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() client.analyze_org_policy_governed_containers(request) @@ -8243,9 +9072,9 @@ def test_analyze_org_policy_governed_containers_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "scope=scope_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -8258,13 +9087,15 @@ async def test_analyze_org_policy_governed_containers_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedContainersResponse()) + type(client.transport.analyze_org_policy_governed_containers), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeOrgPolicyGovernedContainersResponse() + ) await client.analyze_org_policy_governed_containers(request) # Establish that the underlying gRPC stub method was called. @@ -8275,9 +9106,9 @@ async def test_analyze_org_policy_governed_containers_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "scope=scope_value", + ) in kw["metadata"] def test_analyze_org_policy_governed_containers_flattened(): @@ -8287,16 +9118,16 @@ def test_analyze_org_policy_governed_containers_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_containers), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.analyze_org_policy_governed_containers( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) # Establish that the underlying call was made with the expected @@ -8304,13 +9135,13 @@ def test_analyze_org_policy_governed_containers_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].scope - mock_val = 'scope_value' + mock_val = "scope_value" assert arg == mock_val arg = args[0].constraint - mock_val = 'constraint_value' + mock_val = "constraint_value" assert arg == mock_val arg = args[0].filter - mock_val = 'filter_value' + mock_val = "filter_value" assert arg == mock_val @@ -8324,11 +9155,12 @@ def test_analyze_org_policy_governed_containers_flattened_error(): with pytest.raises(ValueError): client.analyze_org_policy_governed_containers( asset_service.AnalyzeOrgPolicyGovernedContainersRequest(), - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) + @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_flattened_async(): client = AssetServiceAsyncClient( @@ -8337,18 +9169,20 @@ async def test_analyze_org_policy_governed_containers_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_containers), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedContainersResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeOrgPolicyGovernedContainersResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.analyze_org_policy_governed_containers( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) # Establish that the underlying call was made with the expected @@ -8356,15 +9190,16 @@ async def test_analyze_org_policy_governed_containers_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].scope - mock_val = 'scope_value' + mock_val = "scope_value" assert arg == mock_val arg = args[0].constraint - mock_val = 'constraint_value' + mock_val = "constraint_value" assert arg == mock_val arg = args[0].filter - mock_val = 'filter_value' + mock_val = "filter_value" assert arg == mock_val + @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_flattened_error_async(): client = AssetServiceAsyncClient( @@ -8376,9 +9211,9 @@ async def test_analyze_org_policy_governed_containers_flattened_error_async(): with pytest.raises(ValueError): await client.analyze_org_policy_governed_containers( asset_service.AnalyzeOrgPolicyGovernedContainersRequest(), - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) @@ -8390,8 +9225,8 @@ def test_analyze_org_policy_governed_containers_pager(transport_name: str = "grp # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_containers), "__call__" + ) as call: # Set the response to a series of pages. call.side_effect = ( asset_service.AnalyzeOrgPolicyGovernedContainersResponse( @@ -8400,17 +9235,17 @@ def test_analyze_org_policy_governed_containers_pager(transport_name: str = "grp asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.AnalyzeOrgPolicyGovernedContainersResponse( governed_containers=[], - next_page_token='def', + next_page_token="def", ), asset_service.AnalyzeOrgPolicyGovernedContainersResponse( governed_containers=[ asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.AnalyzeOrgPolicyGovernedContainersResponse( governed_containers=[ @@ -8425,11 +9260,11 @@ def test_analyze_org_policy_governed_containers_pager(transport_name: str = "grp retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('scope', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", ""),)), + ) + pager = client.analyze_org_policy_governed_containers( + request={}, retry=retry, timeout=timeout ) - pager = client.analyze_org_policy_governed_containers(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata assert pager._retry == retry @@ -8437,8 +9272,15 @@ def test_analyze_org_policy_governed_containers_pager(transport_name: str = "grp results = list(pager) assert len(results) == 6 - assert all(isinstance(i, asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer) - for i in results) + assert all( + isinstance( + i, + 
asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer, + ) + for i in results + ) + + def test_analyze_org_policy_governed_containers_pages(transport_name: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8447,8 +9289,8 @@ def test_analyze_org_policy_governed_containers_pages(transport_name: str = "grp # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_containers), "__call__" + ) as call: # Set the response to a series of pages. call.side_effect = ( asset_service.AnalyzeOrgPolicyGovernedContainersResponse( @@ -8457,17 +9299,17 @@ def test_analyze_org_policy_governed_containers_pages(transport_name: str = "grp asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.AnalyzeOrgPolicyGovernedContainersResponse( governed_containers=[], - next_page_token='def', + next_page_token="def", ), asset_service.AnalyzeOrgPolicyGovernedContainersResponse( governed_containers=[ asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.AnalyzeOrgPolicyGovernedContainersResponse( governed_containers=[ @@ -8478,9 +9320,10 @@ def test_analyze_org_policy_governed_containers_pages(transport_name: str = "grp RuntimeError, ) pages = list(client.analyze_org_policy_governed_containers(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_async_pager(): client = AssetServiceAsyncClient( 
@@ -8489,8 +9332,10 @@ async def test_analyze_org_policy_governed_containers_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.analyze_org_policy_governed_containers), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. call.side_effect = ( asset_service.AnalyzeOrgPolicyGovernedContainersResponse( @@ -8499,17 +9344,17 @@ async def test_analyze_org_policy_governed_containers_async_pager(): asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.AnalyzeOrgPolicyGovernedContainersResponse( governed_containers=[], - next_page_token='def', + next_page_token="def", ), asset_service.AnalyzeOrgPolicyGovernedContainersResponse( governed_containers=[ asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.AnalyzeOrgPolicyGovernedContainersResponse( governed_containers=[ @@ -8519,15 +9364,22 @@ async def test_analyze_org_policy_governed_containers_async_pager(): ), RuntimeError, ) - async_pager = await client.analyze_org_policy_governed_containers(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.analyze_org_policy_governed_containers( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer) - for i in responses) + assert all( + isinstance( + i, + 
asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer, + ) + for i in responses + ) @pytest.mark.asyncio @@ -8538,8 +9390,10 @@ async def test_analyze_org_policy_governed_containers_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.analyze_org_policy_governed_containers), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. call.side_effect = ( asset_service.AnalyzeOrgPolicyGovernedContainersResponse( @@ -8548,17 +9402,17 @@ async def test_analyze_org_policy_governed_containers_async_pages(): asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.AnalyzeOrgPolicyGovernedContainersResponse( governed_containers=[], - next_page_token='def', + next_page_token="def", ), asset_service.AnalyzeOrgPolicyGovernedContainersResponse( governed_containers=[ asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.AnalyzeOrgPolicyGovernedContainersResponse( governed_containers=[ @@ -8571,18 +9425,22 @@ async def test_analyze_org_policy_governed_containers_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.analyze_org_policy_governed_containers(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert 
page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, - dict, -]) -def test_analyze_org_policy_governed_assets(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, + dict, + ], +) +def test_analyze_org_policy_governed_assets(request_type, transport: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8594,11 +9452,11 @@ def test_analyze_org_policy_governed_assets(request_type, transport: str = 'grpc # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_assets), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.analyze_org_policy_governed_assets(request) @@ -8610,7 +9468,7 @@ def test_analyze_org_policy_governed_assets(request_type, transport: str = 'grpc # Establish that the response is the type that we expect. assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedAssetsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_analyze_org_policy_governed_assets_non_empty_request_with_auto_populated_field(): @@ -8618,34 +9476,37 @@ def test_analyze_org_policy_governed_assets_non_empty_request_with_auto_populate # automatically populated, according to AIP-4235, with non-empty requests. 
client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - page_token='page_token_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.analyze_org_policy_governed_assets), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.analyze_org_policy_governed_assets(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeOrgPolicyGovernedAssetsRequest( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - page_token='page_token_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", + page_token="page_token_value", ) + def test_analyze_org_policy_governed_assets_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -8660,12 +9521,19 @@ def test_analyze_org_policy_governed_assets_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.analyze_org_policy_governed_assets in client._transport._wrapped_methods + assert ( + client._transport.analyze_org_policy_governed_assets + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.analyze_org_policy_governed_assets] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.analyze_org_policy_governed_assets + ] = mock_rpc request = {} client.analyze_org_policy_governed_assets(request) @@ -8678,8 +9546,11 @@ def test_analyze_org_policy_governed_assets_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_analyze_org_policy_governed_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_analyze_org_policy_governed_assets_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -8693,12 +9564,17 @@ async def test_analyze_org_policy_governed_assets_async_use_cached_wrapped_rpc(t wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.analyze_org_policy_governed_assets in client._client._transport._wrapped_methods + assert ( + client._client._transport.analyze_org_policy_governed_assets + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.analyze_org_policy_governed_assets] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.analyze_org_policy_governed_assets + ] = mock_rpc request = {} await client.analyze_org_policy_governed_assets(request) @@ -8712,8 +9588,12 @@ async def test_analyze_org_policy_governed_assets_async_use_cached_wrapped_rpc(t assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_analyze_org_policy_governed_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): +async def 
test_analyze_org_policy_governed_assets_async( + transport: str = "grpc_asyncio", + request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, +): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -8725,12 +9605,14 @@ async def test_analyze_org_policy_governed_assets_async(transport: str = 'grpc_a # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_assets), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + next_page_token="next_page_token_value", + ) + ) response = await client.analyze_org_policy_governed_assets(request) # Establish that the underlying gRPC stub method was called. @@ -8741,13 +9623,14 @@ async def test_analyze_org_policy_governed_assets_async(transport: str = 'grpc_a # Establish that the response is the type that we expect. assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_analyze_org_policy_governed_assets_async_from_dict(): await test_analyze_org_policy_governed_assets_async(request_type=dict) + def test_analyze_org_policy_governed_assets_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8757,12 +9640,12 @@ def test_analyze_org_policy_governed_assets_field_headers(): # a field header. Set these to a non-empty value. 
request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_assets), "__call__" + ) as call: call.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() client.analyze_org_policy_governed_assets(request) @@ -8774,9 +9657,9 @@ def test_analyze_org_policy_governed_assets_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "scope=scope_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -8789,13 +9672,15 @@ async def test_analyze_org_policy_governed_assets_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse()) + type(client.transport.analyze_org_policy_governed_assets), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() + ) await client.analyze_org_policy_governed_assets(request) # Establish that the underlying gRPC stub method was called. @@ -8806,9 +9691,9 @@ async def test_analyze_org_policy_governed_assets_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] + "x-goog-request-params", + "scope=scope_value", + ) in kw["metadata"] def test_analyze_org_policy_governed_assets_flattened(): @@ -8818,16 +9703,16 @@ def test_analyze_org_policy_governed_assets_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_assets), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.analyze_org_policy_governed_assets( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) # Establish that the underlying call was made with the expected @@ -8835,13 +9720,13 @@ def test_analyze_org_policy_governed_assets_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].scope - mock_val = 'scope_value' + mock_val = "scope_value" assert arg == mock_val arg = args[0].constraint - mock_val = 'constraint_value' + mock_val = "constraint_value" assert arg == mock_val arg = args[0].filter - mock_val = 'filter_value' + mock_val = "filter_value" assert arg == mock_val @@ -8855,11 +9740,12 @@ def test_analyze_org_policy_governed_assets_flattened_error(): with pytest.raises(ValueError): client.analyze_org_policy_governed_assets( asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(), - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) + @pytest.mark.asyncio async def 
test_analyze_org_policy_governed_assets_flattened_async(): client = AssetServiceAsyncClient( @@ -8868,18 +9754,20 @@ async def test_analyze_org_policy_governed_assets_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_assets), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.analyze_org_policy_governed_assets( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) # Establish that the underlying call was made with the expected @@ -8887,15 +9775,16 @@ async def test_analyze_org_policy_governed_assets_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].scope - mock_val = 'scope_value' + mock_val = "scope_value" assert arg == mock_val arg = args[0].constraint - mock_val = 'constraint_value' + mock_val = "constraint_value" assert arg == mock_val arg = args[0].filter - mock_val = 'filter_value' + mock_val = "filter_value" assert arg == mock_val + @pytest.mark.asyncio async def test_analyze_org_policy_governed_assets_flattened_error_async(): client = AssetServiceAsyncClient( @@ -8907,9 +9796,9 @@ async def test_analyze_org_policy_governed_assets_flattened_error_async(): with pytest.raises(ValueError): await client.analyze_org_policy_governed_assets( 
asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(), - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) @@ -8921,8 +9810,8 @@ def test_analyze_org_policy_governed_assets_pager(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_assets), "__call__" + ) as call: # Set the response to a series of pages. call.side_effect = ( asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( @@ -8931,17 +9820,17 @@ def test_analyze_org_policy_governed_assets_pager(transport_name: str = "grpc"): asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( governed_assets=[], - next_page_token='def', + next_page_token="def", ), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( governed_assets=[ asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( governed_assets=[ @@ -8956,11 +9845,11 @@ def test_analyze_org_policy_governed_assets_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('scope', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("scope", ""),)), + ) + pager = client.analyze_org_policy_governed_assets( + request={}, retry=retry, timeout=timeout ) - pager = client.analyze_org_policy_governed_assets(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata assert pager._retry == retry @@ -8968,8 
+9857,14 @@ def test_analyze_org_policy_governed_assets_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset) - for i in results) + assert all( + isinstance( + i, asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset + ) + for i in results + ) + + def test_analyze_org_policy_governed_assets_pages(transport_name: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8978,8 +9873,8 @@ def test_analyze_org_policy_governed_assets_pages(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_assets), "__call__" + ) as call: # Set the response to a series of pages. call.side_effect = ( asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( @@ -8988,17 +9883,17 @@ def test_analyze_org_policy_governed_assets_pages(transport_name: str = "grpc"): asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( governed_assets=[], - next_page_token='def', + next_page_token="def", ), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( governed_assets=[ asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( governed_assets=[ @@ -9009,9 +9904,10 @@ def test_analyze_org_policy_governed_assets_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.analyze_org_policy_governed_assets(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in 
zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_analyze_org_policy_governed_assets_async_pager(): client = AssetServiceAsyncClient( @@ -9020,8 +9916,10 @@ async def test_analyze_org_policy_governed_assets_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.analyze_org_policy_governed_assets), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. call.side_effect = ( asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( @@ -9030,17 +9928,17 @@ async def test_analyze_org_policy_governed_assets_async_pager(): asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( governed_assets=[], - next_page_token='def', + next_page_token="def", ), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( governed_assets=[ asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( governed_assets=[ @@ -9050,15 +9948,21 @@ async def test_analyze_org_policy_governed_assets_async_pager(): ), RuntimeError, ) - async_pager = await client.analyze_org_policy_governed_assets(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.analyze_org_policy_governed_assets( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, 
asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset) - for i in responses) + assert all( + isinstance( + i, asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset + ) + for i in responses + ) @pytest.mark.asyncio @@ -9069,8 +9973,10 @@ async def test_analyze_org_policy_governed_assets_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.analyze_org_policy_governed_assets), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. call.side_effect = ( asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( @@ -9079,17 +9985,17 @@ async def test_analyze_org_policy_governed_assets_async_pages(): asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( governed_assets=[], - next_page_token='def', + next_page_token="def", ), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( governed_assets=[ asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( governed_assets=[ @@ -9102,11 +10008,11 @@ async def test_analyze_org_policy_governed_assets_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.analyze_org_policy_governed_assets(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, 
["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -9128,7 +10034,9 @@ def test_export_assets_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.export_assets] = mock_rpc request = {} @@ -9148,80 +10056,94 @@ def test_export_assets_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_export_assets_rest_required_fields(request_type=asset_service.ExportAssetsRequest): +def test_export_assets_rest_required_fields( + request_type=asset_service.ExportAssetsRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + 
).export_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.export_assets(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_export_assets_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.export_assets._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "outputConfig", ))) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "outputConfig", + ) + ) + ) def test_list_assets_rest_use_cached_wrapped_rpc(): @@ -9242,7 +10164,9 @@ def test_list_assets_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.list_assets] = mock_rpc request = {} @@ -9265,50 +10189,62 @@ def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsR request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_assets._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("asset_types", "content_type", "page_size", "page_token", "read_time", "relationship_types", )) + assert not set(unset_fields) - set( + ( + "asset_types", + "content_type", + "page_size", + "page_token", + "read_time", + "relationship_types", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.ListAssetsResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -9319,23 +10255,36 @@ def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsR return_value = asset_service.ListAssetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_assets(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_assets_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_assets._get_unset_required_fields({}) - assert set(unset_fields) == (set(("assetTypes", "contentType", "pageSize", "pageToken", "readTime", "relationshipTypes", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "assetTypes", + "contentType", + "pageSize", + "pageToken", + "readTime", + "relationshipTypes", + ) + ) + & set(("parent",)) + ) def test_list_assets_rest_flattened(): @@ -9345,16 +10294,16 @@ def test_list_assets_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = asset_service.ListAssetsResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'sample1/sample2'} + sample_request = {"parent": "sample1/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -9364,7 +10313,7 @@ def test_list_assets_rest_flattened(): # Convert return value to protobuf type return_value = asset_service.ListAssetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -9374,10 +10323,12 @@ def test_list_assets_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=*/*}/assets" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=*/*}/assets" % client.transport._host, args[1] + ) -def test_list_assets_rest_flattened_error(transport: str = 'rest'): +def test_list_assets_rest_flattened_error(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9388,20 +10339,20 @@ def test_list_assets_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_assets( asset_service.ListAssetsRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_assets_rest_pager(transport: str = 'rest'): +def test_list_assets_rest_pager(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( asset_service.ListAssetsResponse( @@ -9410,17 +10361,17 @@ def test_list_assets_rest_pager(transport: str = 'rest'): assets.Asset(), assets.Asset(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.ListAssetsResponse( assets=[], - next_page_token='def', + next_page_token="def", ), asset_service.ListAssetsResponse( assets=[ assets.Asset(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.ListAssetsResponse( assets=[ @@ -9436,21 +10387,20 @@ def test_list_assets_rest_pager(transport: str = 'rest'): response = tuple(asset_service.ListAssetsResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'sample1/sample2'} + sample_request = {"parent": "sample1/sample2"} pager = client.list_assets(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, assets.Asset) - for i in results) + assert all(isinstance(i, assets.Asset) for i in results) pages = list(client.list_assets(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -9468,12 +10418,19 @@ def test_batch_get_assets_history_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert 
client._transport.batch_get_assets_history in client._transport._wrapped_methods + assert ( + client._transport.batch_get_assets_history + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.batch_get_assets_history] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.batch_get_assets_history + ] = mock_rpc request = {} client.batch_get_assets_history(request) @@ -9488,57 +10445,69 @@ def test_batch_get_assets_history_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_batch_get_assets_history_rest_required_fields(request_type=asset_service.BatchGetAssetsHistoryRequest): +def test_batch_get_assets_history_rest_required_fields( + request_type=asset_service.BatchGetAssetsHistoryRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_assets_history._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_get_assets_history._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + 
jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_assets_history._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_get_assets_history._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("asset_names", "content_type", "read_time_window", "relationship_types", )) + assert not set(unset_fields) - set( + ( + "asset_names", + "content_type", + "read_time_window", + "relationship_types", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.BatchGetAssetsHistoryResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -9549,23 +10518,34 @@ def test_batch_get_assets_history_rest_required_fields(request_type=asset_servic return_value = asset_service.BatchGetAssetsHistoryResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.batch_get_assets_history(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_batch_get_assets_history_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.batch_get_assets_history._get_unset_required_fields({}) - assert set(unset_fields) == (set(("assetNames", "contentType", "readTimeWindow", "relationshipTypes", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "assetNames", + "contentType", + "readTimeWindow", + "relationshipTypes", + ) + ) + & set(("parent",)) + ) def test_create_feed_rest_use_cached_wrapped_rpc(): @@ -9586,7 +10566,9 @@ def test_create_feed_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.create_feed] = mock_rpc request = {} @@ -9610,53 +10592,56 @@ def test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedR request_init["feed_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' - jsonified_request["feedId"] = 'feed_id_value' + jsonified_request["parent"] = "parent_value" + jsonified_request["feedId"] = "feed_id_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" assert "feedId" in jsonified_request - assert jsonified_request["feedId"] == 'feed_id_value' + assert jsonified_request["feedId"] == "feed_id_value" client = AssetServiceClient( 
credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.Feed() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -9666,23 +10651,33 @@ def test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedR return_value = asset_service.Feed.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_feed(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_create_feed_rest_unset_required_fields(): - transport = 
transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.create_feed._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "feedId", "feed", ))) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "feedId", + "feed", + ) + ) + ) def test_create_feed_rest_flattened(): @@ -9692,16 +10687,16 @@ def test_create_feed_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.Feed() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'sample1/sample2'} + sample_request = {"parent": "sample1/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -9711,7 +10706,7 @@ def test_create_feed_rest_flattened(): # Convert return value to protobuf type return_value = asset_service.Feed.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -9721,10 +10716,12 @@ def test_create_feed_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=*/*}/feeds" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=*/*}/feeds" % client.transport._host, args[1] + ) -def test_create_feed_rest_flattened_error(transport: str = 'rest'): +def test_create_feed_rest_flattened_error(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9735,7 +10732,7 @@ def test_create_feed_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.create_feed( asset_service.CreateFeedRequest(), - parent='parent_value', + parent="parent_value", ) @@ -9757,7 +10754,9 @@ def test_get_feed_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.get_feed] = mock_rpc request = {} @@ -9780,48 +10779,51 @@ def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.Feed() # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -9832,23 +10834,24 @@ def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest return_value = asset_service.Feed.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_feed(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_get_feed_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.get_feed._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_get_feed_rest_flattened(): @@ -9858,16 +10861,16 @@ def test_get_feed_rest_flattened(): ) # Mock the 
http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.Feed() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'sample1/sample2/feeds/sample3'} + sample_request = {"name": "sample1/sample2/feeds/sample3"} # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -9877,7 +10880,7 @@ def test_get_feed_rest_flattened(): # Convert return value to protobuf type return_value = asset_service.Feed.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -9887,10 +10890,12 @@ def test_get_feed_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=*/*/feeds/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=*/*/feeds/*}" % client.transport._host, args[1] + ) -def test_get_feed_rest_flattened_error(transport: str = 'rest'): +def test_get_feed_rest_flattened_error(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9901,7 +10906,7 @@ def test_get_feed_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.get_feed( asset_service.GetFeedRequest(), - name='name_value', + name="name_value", ) @@ -9923,7 +10928,9 @@ def test_list_feeds_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.list_feeds] = mock_rpc request = {} @@ -9946,48 +10953,51 @@ def test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsReq request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_feeds._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_feeds._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_feeds._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_feeds._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.ListFeedsResponse() # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -9998,23 +11008,24 @@ def test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsReq return_value = asset_service.ListFeedsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_feeds(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_feeds_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_feeds._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", ))) + assert set(unset_fields) == (set(()) & set(("parent",))) def test_list_feeds_rest_flattened(): @@ -10024,16 +11035,16 @@ def 
test_list_feeds_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.ListFeedsResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'sample1/sample2'} + sample_request = {"parent": "sample1/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -10043,7 +11054,7 @@ def test_list_feeds_rest_flattened(): # Convert return value to protobuf type return_value = asset_service.ListFeedsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -10053,10 +11064,12 @@ def test_list_feeds_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=*/*}/feeds" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=*/*}/feeds" % client.transport._host, args[1] + ) -def test_list_feeds_rest_flattened_error(transport: str = 'rest'): +def test_list_feeds_rest_flattened_error(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10067,7 +11080,7 @@ def test_list_feeds_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_feeds( asset_service.ListFeedsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -10089,7 +11102,9 @@ def test_update_feed_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.update_feed] = mock_rpc request = {} @@ -10111,46 +11126,49 @@ def test_update_feed_rest_required_fields(request_type=asset_service.UpdateFeedR request_init = {} request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.Feed() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -10160,23 +11178,32 @@ def test_update_feed_rest_required_fields(request_type=asset_service.UpdateFeedR return_value = asset_service.Feed.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_feed(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_update_feed_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.update_feed._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("feed", "updateMask", ))) + assert set(unset_fields) == ( + set(()) + & set( + ( + "feed", + "updateMask", + ) + ) + ) def test_update_feed_rest_flattened(): @@ -10186,16 +11213,16 @@ def test_update_feed_rest_flattened(): ) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.Feed() # get arguments that satisfy an http rule for this method - sample_request = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} + sample_request = {"feed": {"name": "sample1/sample2/feeds/sample3"}} # get truthy value for each flattened field mock_args = dict( - feed=asset_service.Feed(name='name_value'), + feed=asset_service.Feed(name="name_value"), ) mock_args.update(sample_request) @@ -10205,7 +11232,7 @@ def test_update_feed_rest_flattened(): # Convert return value to protobuf type return_value = asset_service.Feed.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -10215,10 +11242,12 @@ def test_update_feed_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{feed.name=*/*/feeds/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{feed.name=*/*/feeds/*}" % client.transport._host, args[1] + ) -def test_update_feed_rest_flattened_error(transport: str = 'rest'): +def test_update_feed_rest_flattened_error(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10229,7 +11258,7 @@ def test_update_feed_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.update_feed( asset_service.UpdateFeedRequest(), - feed=asset_service.Feed(name='name_value'), + feed=asset_service.Feed(name="name_value"), ) @@ -10251,7 +11280,9 @@ def test_delete_feed_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.delete_feed] = mock_rpc request = {} @@ -10274,72 +11305,76 @@ def test_delete_feed_rest_required_fields(request_type=asset_service.DeleteFeedR request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = None # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = '' + json_return_value = "" - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_feed(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_delete_feed_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.delete_feed._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_delete_feed_rest_flattened(): @@ -10349,24 +11384,24 @@ def test_delete_feed_rest_flattened(): ) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = None # get arguments that satisfy an http rule for this method - sample_request = {'name': 'sample1/sample2/feeds/sample3'} + sample_request = {"name": "sample1/sample2/feeds/sample3"} # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -10376,10 +11411,12 @@ def test_delete_feed_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=*/*/feeds/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=*/*/feeds/*}" % client.transport._host, args[1] + ) -def test_delete_feed_rest_flattened_error(transport: str = 'rest'): +def test_delete_feed_rest_flattened_error(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10390,7 +11427,7 @@ def test_delete_feed_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.delete_feed( asset_service.DeleteFeedRequest(), - name='name_value', + name="name_value", ) @@ -10408,12 +11445,18 @@ def test_search_all_resources_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.search_all_resources in client._transport._wrapped_methods + assert ( + client._transport.search_all_resources in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.search_all_resources] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.search_all_resources] = ( + mock_rpc + ) request = {} client.search_all_resources(request) @@ -10428,57 +11471,71 @@ def test_search_all_resources_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_search_all_resources_rest_required_fields(request_type=asset_service.SearchAllResourcesRequest): +def test_search_all_resources_rest_required_fields( + request_type=asset_service.SearchAllResourcesRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} request_init["scope"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_resources._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_all_resources._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["scope"] = 'scope_value' + jsonified_request["scope"] = "scope_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_resources._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_all_resources._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("asset_types", "order_by", "page_size", "page_token", "query", "read_mask", )) + assert not set(unset_fields) - set( + ( + "asset_types", + "order_by", + "page_size", + "page_token", + "query", + "read_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "scope" in jsonified_request - assert jsonified_request["scope"] == 'scope_value' + assert jsonified_request["scope"] == "scope_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.SearchAllResourcesResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -10489,23 +11546,36 @@ def test_search_all_resources_rest_required_fields(request_type=asset_service.Se return_value = asset_service.SearchAllResourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.search_all_resources(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_search_all_resources_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.search_all_resources._get_unset_required_fields({}) - assert set(unset_fields) == (set(("assetTypes", "orderBy", "pageSize", "pageToken", "query", "readMask", )) & set(("scope", ))) + assert set(unset_fields) == ( + set( + ( + "assetTypes", + "orderBy", + "pageSize", + "pageToken", + "query", + "readMask", + ) + ) + & set(("scope",)) + ) def test_search_all_resources_rest_flattened(): @@ -10515,18 +11585,18 @@ def test_search_all_resources_rest_flattened(): ) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.SearchAllResourcesResponse() # get arguments that satisfy an http rule for this method - sample_request = {'scope': 'sample1/sample2'} + sample_request = {"scope": "sample1/sample2"} # get truthy value for each flattened field mock_args = dict( - scope='scope_value', - query='query_value', - asset_types=['asset_types_value'], + scope="scope_value", + query="query_value", + asset_types=["asset_types_value"], ) mock_args.update(sample_request) @@ -10536,7 +11606,7 @@ def test_search_all_resources_rest_flattened(): # Convert return value to protobuf type return_value = asset_service.SearchAllResourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -10546,10 +11616,12 @@ def test_search_all_resources_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{scope=*/*}:searchAllResources" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{scope=*/*}:searchAllResources" % client.transport._host, args[1] + ) -def test_search_all_resources_rest_flattened_error(transport: str = 'rest'): +def test_search_all_resources_rest_flattened_error(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10560,22 +11632,22 @@ def test_search_all_resources_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.search_all_resources( asset_service.SearchAllResourcesRequest(), - scope='scope_value', - query='query_value', - asset_types=['asset_types_value'], + scope="scope_value", + query="query_value", + asset_types=["asset_types_value"], ) -def test_search_all_resources_rest_pager(transport: str = 'rest'): +def test_search_all_resources_rest_pager(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( asset_service.SearchAllResourcesResponse( @@ -10584,17 +11656,17 @@ def test_search_all_resources_rest_pager(transport: str = 'rest'): assets.ResourceSearchResult(), assets.ResourceSearchResult(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.SearchAllResourcesResponse( results=[], - next_page_token='def', + next_page_token="def", ), asset_service.SearchAllResourcesResponse( results=[ assets.ResourceSearchResult(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.SearchAllResourcesResponse( results=[ @@ -10607,24 +11679,25 @@ def test_search_all_resources_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(asset_service.SearchAllResourcesResponse.to_json(x) for x in response) + response = tuple( + asset_service.SearchAllResourcesResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'scope': 'sample1/sample2'} + sample_request = {"scope": "sample1/sample2"} pager = client.search_all_resources(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, assets.ResourceSearchResult) - for i in results) + assert all(isinstance(i, assets.ResourceSearchResult) for i in results) pages = list(client.search_all_resources(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -10642,12 +11715,19 @@ def 
test_search_all_iam_policies_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.search_all_iam_policies in client._transport._wrapped_methods + assert ( + client._transport.search_all_iam_policies + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.search_all_iam_policies] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.search_all_iam_policies + ] = mock_rpc request = {} client.search_all_iam_policies(request) @@ -10662,57 +11742,70 @@ def test_search_all_iam_policies_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_search_all_iam_policies_rest_required_fields(request_type=asset_service.SearchAllIamPoliciesRequest): +def test_search_all_iam_policies_rest_required_fields( + request_type=asset_service.SearchAllIamPoliciesRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} request_init["scope"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_iam_policies._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_all_iam_policies._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required 
fields with default values are now present - jsonified_request["scope"] = 'scope_value' + jsonified_request["scope"] = "scope_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_iam_policies._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_all_iam_policies._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("asset_types", "order_by", "page_size", "page_token", "query", )) + assert not set(unset_fields) - set( + ( + "asset_types", + "order_by", + "page_size", + "page_token", + "query", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "scope" in jsonified_request - assert jsonified_request["scope"] == 'scope_value' + assert jsonified_request["scope"] == "scope_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.SearchAllIamPoliciesResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -10723,23 +11816,35 @@ def test_search_all_iam_policies_rest_required_fields(request_type=asset_service return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.search_all_iam_policies(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_search_all_iam_policies_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.search_all_iam_policies._get_unset_required_fields({}) - assert set(unset_fields) == (set(("assetTypes", "orderBy", "pageSize", "pageToken", "query", )) & set(("scope", ))) + assert set(unset_fields) == ( + set( + ( + "assetTypes", + "orderBy", + "pageSize", + "pageToken", + "query", + ) + ) + & set(("scope",)) + ) def test_search_all_iam_policies_rest_flattened(): @@ -10749,17 +11854,17 @@ def test_search_all_iam_policies_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = asset_service.SearchAllIamPoliciesResponse() # get arguments that satisfy an http rule for this method - sample_request = {'scope': 'sample1/sample2'} + sample_request = {"scope": "sample1/sample2"} # get truthy value for each flattened field mock_args = dict( - scope='scope_value', - query='query_value', + scope="scope_value", + query="query_value", ) mock_args.update(sample_request) @@ -10769,7 +11874,7 @@ def test_search_all_iam_policies_rest_flattened(): # Convert return value to protobuf type return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -10779,10 +11884,12 @@ def test_search_all_iam_policies_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{scope=*/*}:searchAllIamPolicies" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{scope=*/*}:searchAllIamPolicies" % client.transport._host, args[1] + ) -def test_search_all_iam_policies_rest_flattened_error(transport: str = 'rest'): +def test_search_all_iam_policies_rest_flattened_error(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10793,21 +11900,21 @@ def test_search_all_iam_policies_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.search_all_iam_policies( asset_service.SearchAllIamPoliciesRequest(), - scope='scope_value', - query='query_value', + scope="scope_value", + query="query_value", ) -def test_search_all_iam_policies_rest_pager(transport: str = 'rest'): +def test_search_all_iam_policies_rest_pager(transport: str = "rest"): client = 
AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( asset_service.SearchAllIamPoliciesResponse( @@ -10816,17 +11923,17 @@ def test_search_all_iam_policies_rest_pager(transport: str = 'rest'): assets.IamPolicySearchResult(), assets.IamPolicySearchResult(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.SearchAllIamPoliciesResponse( results=[], - next_page_token='def', + next_page_token="def", ), asset_service.SearchAllIamPoliciesResponse( results=[ assets.IamPolicySearchResult(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.SearchAllIamPoliciesResponse( results=[ @@ -10839,24 +11946,25 @@ def test_search_all_iam_policies_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(asset_service.SearchAllIamPoliciesResponse.to_json(x) for x in response) + response = tuple( + asset_service.SearchAllIamPoliciesResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'scope': 'sample1/sample2'} + sample_request = {"scope": "sample1/sample2"} pager = client.search_all_iam_policies(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, assets.IamPolicySearchResult) 
- for i in results) + assert all(isinstance(i, assets.IamPolicySearchResult) for i in results) pages = list(client.search_all_iam_policies(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -10874,12 +11982,18 @@ def test_analyze_iam_policy_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.analyze_iam_policy in client._transport._wrapped_methods + assert ( + client._transport.analyze_iam_policy in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.analyze_iam_policy] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.analyze_iam_policy] = ( + mock_rpc + ) request = {} client.analyze_iam_policy(request) @@ -10894,52 +12008,63 @@ def test_analyze_iam_policy_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_analyze_iam_policy_rest_required_fields(request_type=asset_service.AnalyzeIamPolicyRequest): +def test_analyze_iam_policy_rest_required_fields( + request_type=asset_service.AnalyzeIamPolicyRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_iam_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_iam_policy._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("analysis_query", "execution_timeout", "saved_analysis_query", )) + assert not set(unset_fields) - set( + ( + "analysis_query", + "execution_timeout", + "saved_analysis_query", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.AnalyzeIamPolicyResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -10950,23 +12075,33 @@ def test_analyze_iam_policy_rest_required_fields(request_type=asset_service.Anal return_value = asset_service.AnalyzeIamPolicyResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_iam_policy(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_analyze_iam_policy_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.analyze_iam_policy._get_unset_required_fields({}) - assert set(unset_fields) == (set(("analysisQuery", "executionTimeout", "savedAnalysisQuery", )) & set(("analysisQuery", ))) + assert set(unset_fields) == ( + set( + ( + "analysisQuery", + "executionTimeout", + "savedAnalysisQuery", + ) + ) + & set(("analysisQuery",)) + ) def test_analyze_iam_policy_longrunning_rest_use_cached_wrapped_rpc(): @@ -10983,12 +12118,19 @@ def test_analyze_iam_policy_longrunning_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.analyze_iam_policy_longrunning in client._transport._wrapped_methods + assert ( + client._transport.analyze_iam_policy_longrunning + in client._transport._wrapped_methods + 
) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.analyze_iam_policy_longrunning] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.analyze_iam_policy_longrunning + ] = mock_rpc request = {} client.analyze_iam_policy_longrunning(request) @@ -11007,75 +12149,91 @@ def test_analyze_iam_policy_longrunning_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_analyze_iam_policy_longrunning_rest_required_fields(request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): +def test_analyze_iam_policy_longrunning_rest_required_fields( + request_type=asset_service.AnalyzeIamPolicyLongrunningRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy_longrunning._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_iam_policy_longrunning._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy_longrunning._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + 
credentials=ga_credentials.AnonymousCredentials() + ).analyze_iam_policy_longrunning._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_iam_policy_longrunning(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_analyze_iam_policy_longrunning_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - unset_fields = transport.analyze_iam_policy_longrunning._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("analysisQuery", "outputConfig", ))) + unset_fields = transport.analyze_iam_policy_longrunning._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set(()) + & set( + ( + "analysisQuery", + "outputConfig", + ) + ) + ) def test_analyze_move_rest_use_cached_wrapped_rpc(): @@ -11096,7 +12254,9 @@ def test_analyze_move_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.analyze_move] = mock_rpc request = {} @@ -11112,7 +12272,9 @@ def test_analyze_move_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_analyze_move_rest_required_fields(request_type=asset_service.AnalyzeMoveRequest): +def test_analyze_move_rest_required_fields( + request_type=asset_service.AnalyzeMoveRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} @@ -11120,56 +12282,64 @@ def test_analyze_move_rest_required_fields(request_type=asset_service.AnalyzeMov request_init["destination_parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "destinationParent" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_move._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_move._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present assert "destinationParent" in jsonified_request assert jsonified_request["destinationParent"] == request_init["destination_parent"] - jsonified_request["resource"] = 'resource_value' - jsonified_request["destinationParent"] = 'destination_parent_value' + jsonified_request["resource"] = "resource_value" + jsonified_request["destinationParent"] = "destination_parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_move._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + 
credentials=ga_credentials.AnonymousCredentials() + ).analyze_move._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("destination_parent", "view", )) + assert not set(unset_fields) - set( + ( + "destination_parent", + "view", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "resource" in jsonified_request - assert jsonified_request["resource"] == 'resource_value' + assert jsonified_request["resource"] == "resource_value" assert "destinationParent" in jsonified_request - assert jsonified_request["destinationParent"] == 'destination_parent_value' + assert jsonified_request["destinationParent"] == "destination_parent_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.AnalyzeMoveResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -11180,7 +12350,7 @@ def test_analyze_move_rest_required_fields(request_type=asset_service.AnalyzeMov return_value = asset_service.AnalyzeMoveResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -11192,15 +12362,30 @@ def test_analyze_move_rest_required_fields(request_type=asset_service.AnalyzeMov "", ), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_analyze_move_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.analyze_move._get_unset_required_fields({}) - assert set(unset_fields) == (set(("destinationParent", "view", )) & set(("resource", "destinationParent", ))) + assert set(unset_fields) == ( + set( + ( + "destinationParent", + "view", + ) + ) + & set( + ( + "resource", + "destinationParent", + ) + ) + ) def test_query_assets_rest_use_cached_wrapped_rpc(): @@ -11221,7 +12406,9 @@ def test_query_assets_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.query_assets] = mock_rpc request = {} @@ -11237,57 +12424,62 @@ def test_query_assets_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_query_assets_rest_required_fields(request_type=asset_service.QueryAssetsRequest): +def test_query_assets_rest_required_fields( + request_type=asset_service.QueryAssetsRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).query_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).query_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).query_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).query_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = 
request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.QueryAssetsResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -11297,23 +12489,24 @@ def test_query_assets_rest_required_fields(request_type=asset_service.QueryAsset return_value = asset_service.QueryAssetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.query_assets(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_query_assets_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + 
credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.query_assets._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", ))) + assert set(unset_fields) == (set(()) & set(("parent",))) def test_create_saved_query_rest_use_cached_wrapped_rpc(): @@ -11330,12 +12523,18 @@ def test_create_saved_query_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_saved_query in client._transport._wrapped_methods + assert ( + client._transport.create_saved_query in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_saved_query] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_saved_query] = ( + mock_rpc + ) request = {} client.create_saved_query(request) @@ -11350,7 +12549,9 @@ def test_create_saved_query_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_create_saved_query_rest_required_fields(request_type=asset_service.CreateSavedQueryRequest): +def test_create_saved_query_rest_required_fields( + request_type=asset_service.CreateSavedQueryRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} @@ -11358,58 +12559,61 @@ def test_create_saved_query_rest_required_fields(request_type=asset_service.Crea request_init["saved_query_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default 
values are dropped assert "savedQueryId" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_saved_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present assert "savedQueryId" in jsonified_request assert jsonified_request["savedQueryId"] == request_init["saved_query_id"] - jsonified_request["parent"] = 'parent_value' - jsonified_request["savedQueryId"] = 'saved_query_id_value' + jsonified_request["parent"] = "parent_value" + jsonified_request["savedQueryId"] = "saved_query_id_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_saved_query._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("saved_query_id", )) + assert not set(unset_fields) - set(("saved_query_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" assert "savedQueryId" in jsonified_request - assert jsonified_request["savedQueryId"] == 'saved_query_id_value' + assert jsonified_request["savedQueryId"] == "saved_query_id_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. 
return_value = asset_service.SavedQuery() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -11419,7 +12623,7 @@ def test_create_saved_query_rest_required_fields(request_type=asset_service.Crea return_value = asset_service.SavedQuery.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -11431,15 +12635,26 @@ def test_create_saved_query_rest_required_fields(request_type=asset_service.Crea "", ), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_create_saved_query_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = 
transport.create_saved_query._get_unset_required_fields({}) - assert set(unset_fields) == (set(("savedQueryId", )) & set(("parent", "savedQuery", "savedQueryId", ))) + assert set(unset_fields) == ( + set(("savedQueryId",)) + & set( + ( + "parent", + "savedQuery", + "savedQueryId", + ) + ) + ) def test_create_saved_query_rest_flattened(): @@ -11449,18 +12664,18 @@ def test_create_saved_query_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.SavedQuery() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'sample1/sample2'} + sample_request = {"parent": "sample1/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', - saved_query=asset_service.SavedQuery(name='name_value'), - saved_query_id='saved_query_id_value', + parent="parent_value", + saved_query=asset_service.SavedQuery(name="name_value"), + saved_query_id="saved_query_id_value", ) mock_args.update(sample_request) @@ -11470,7 +12685,7 @@ def test_create_saved_query_rest_flattened(): # Convert return value to protobuf type return_value = asset_service.SavedQuery.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -11480,10 +12695,12 @@ def test_create_saved_query_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=*/*}/savedQueries" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=*/*}/savedQueries" % client.transport._host, args[1] + ) -def test_create_saved_query_rest_flattened_error(transport: str = 'rest'): +def test_create_saved_query_rest_flattened_error(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11494,9 +12711,9 @@ def test_create_saved_query_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.create_saved_query( asset_service.CreateSavedQueryRequest(), - parent='parent_value', - saved_query=asset_service.SavedQuery(name='name_value'), - saved_query_id='saved_query_id_value', + parent="parent_value", + saved_query=asset_service.SavedQuery(name="name_value"), + saved_query_id="saved_query_id_value", ) @@ -11518,7 +12735,9 @@ def test_get_saved_query_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.get_saved_query] = mock_rpc request = {} @@ -11534,55 +12753,60 @@ def test_get_saved_query_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_get_saved_query_rest_required_fields(request_type=asset_service.GetSavedQueryRequest): +def test_get_saved_query_rest_required_fields( + request_type=asset_service.GetSavedQueryRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_saved_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_saved_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) 
request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.SavedQuery() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -11593,23 +12817,24 @@ def test_get_saved_query_rest_required_fields(request_type=asset_service.GetSave return_value = asset_service.SavedQuery.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_saved_query(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_get_saved_query_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = 
transport.get_saved_query._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_get_saved_query_rest_flattened(): @@ -11619,16 +12844,16 @@ def test_get_saved_query_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.SavedQuery() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'sample1/sample2/savedQueries/sample3'} + sample_request = {"name": "sample1/sample2/savedQueries/sample3"} # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -11638,7 +12863,7 @@ def test_get_saved_query_rest_flattened(): # Convert return value to protobuf type return_value = asset_service.SavedQuery.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -11648,10 +12873,12 @@ def test_get_saved_query_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=*/*/savedQueries/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=*/*/savedQueries/*}" % client.transport._host, args[1] + ) -def test_get_saved_query_rest_flattened_error(transport: str = 'rest'): +def test_get_saved_query_rest_flattened_error(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11662,7 +12889,7 @@ def test_get_saved_query_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.get_saved_query( asset_service.GetSavedQueryRequest(), - name='name_value', + name="name_value", ) @@ -11680,12 +12907,18 @@ def test_list_saved_queries_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_saved_queries in client._transport._wrapped_methods + assert ( + client._transport.list_saved_queries in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_saved_queries] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.list_saved_queries] = ( + mock_rpc + ) request = {} client.list_saved_queries(request) @@ -11700,57 +12933,68 @@ def test_list_saved_queries_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_saved_queries_rest_required_fields(request_type=asset_service.ListSavedQueriesRequest): +def test_list_saved_queries_rest_required_fields( + request_type=asset_service.ListSavedQueriesRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_saved_queries._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_saved_queries._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_saved_queries._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_saved_queries._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.ListSavedQueriesResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -11761,23 +13005,33 @@ def test_list_saved_queries_rest_required_fields(request_type=asset_service.List return_value = asset_service.ListSavedQueriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_saved_queries(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_saved_queries_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_saved_queries._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) def test_list_saved_queries_rest_flattened(): @@ -11787,16 +13041,16 @@ def test_list_saved_queries_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = asset_service.ListSavedQueriesResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'sample1/sample2'} + sample_request = {"parent": "sample1/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -11806,7 +13060,7 @@ def test_list_saved_queries_rest_flattened(): # Convert return value to protobuf type return_value = asset_service.ListSavedQueriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -11816,10 +13070,12 @@ def test_list_saved_queries_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=*/*}/savedQueries" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=*/*}/savedQueries" % client.transport._host, args[1] + ) -def test_list_saved_queries_rest_flattened_error(transport: str = 'rest'): +def test_list_saved_queries_rest_flattened_error(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11830,20 +13086,20 @@ def test_list_saved_queries_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_saved_queries( asset_service.ListSavedQueriesRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_saved_queries_rest_pager(transport: str = 'rest'): +def test_list_saved_queries_rest_pager(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a 
response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( asset_service.ListSavedQueriesResponse( @@ -11852,17 +13108,17 @@ def test_list_saved_queries_rest_pager(transport: str = 'rest'): asset_service.SavedQuery(), asset_service.SavedQuery(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.ListSavedQueriesResponse( saved_queries=[], - next_page_token='def', + next_page_token="def", ), asset_service.ListSavedQueriesResponse( saved_queries=[ asset_service.SavedQuery(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.ListSavedQueriesResponse( saved_queries=[ @@ -11875,24 +13131,25 @@ def test_list_saved_queries_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(asset_service.ListSavedQueriesResponse.to_json(x) for x in response) + response = tuple( + asset_service.ListSavedQueriesResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'sample1/sample2'} + sample_request = {"parent": "sample1/sample2"} pager = client.list_saved_queries(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, asset_service.SavedQuery) - for i in results) + assert all(isinstance(i, asset_service.SavedQuery) for i in results) pages = list(client.list_saved_queries(request=sample_request).pages) - for page_, token in 
zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -11910,12 +13167,18 @@ def test_update_saved_query_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_saved_query in client._transport._wrapped_methods + assert ( + client._transport.update_saved_query in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_saved_query] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_saved_query] = ( + mock_rpc + ) request = {} client.update_saved_query(request) @@ -11930,54 +13193,59 @@ def test_update_saved_query_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_update_saved_query_rest_required_fields(request_type=asset_service.UpdateSavedQueryRequest): +def test_update_saved_query_rest_required_fields( + request_type=asset_service.UpdateSavedQueryRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + 
).update_saved_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_saved_query._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", )) + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.SavedQuery() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -11987,23 +13255,32 @@ def test_update_saved_query_rest_required_fields(request_type=asset_service.Upda return_value = asset_service.SavedQuery.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_saved_query(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_update_saved_query_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.update_saved_query._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("savedQuery", "updateMask", ))) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "savedQuery", + "updateMask", + ) + ) + ) def test_update_saved_query_rest_flattened(): @@ -12013,17 +13290,19 @@ def test_update_saved_query_rest_flattened(): ) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.SavedQuery() # get arguments that satisfy an http rule for this method - sample_request = {'saved_query': {'name': 'sample1/sample2/savedQueries/sample3'}} + sample_request = { + "saved_query": {"name": "sample1/sample2/savedQueries/sample3"} + } # get truthy value for each flattened field mock_args = dict( - saved_query=asset_service.SavedQuery(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + saved_query=asset_service.SavedQuery(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -12033,7 +13312,7 @@ def test_update_saved_query_rest_flattened(): # Convert return value to protobuf type return_value = asset_service.SavedQuery.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -12043,10 +13322,13 @@ def test_update_saved_query_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{saved_query.name=*/*/savedQueries/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{saved_query.name=*/*/savedQueries/*}" % client.transport._host, + args[1], + ) -def test_update_saved_query_rest_flattened_error(transport: str = 'rest'): +def test_update_saved_query_rest_flattened_error(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12057,8 +13339,8 @@ def test_update_saved_query_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.update_saved_query( asset_service.UpdateSavedQueryRequest(), - saved_query=asset_service.SavedQuery(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + saved_query=asset_service.SavedQuery(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -12076,12 +13358,18 @@ def test_delete_saved_query_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_saved_query in client._transport._wrapped_methods + assert ( + client._transport.delete_saved_query in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_saved_query] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.delete_saved_query] = ( + mock_rpc + ) request = {} client.delete_saved_query(request) @@ -12096,79 +13384,85 @@ def test_delete_saved_query_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_delete_saved_query_rest_required_fields(request_type=asset_service.DeleteSavedQueryRequest): +def test_delete_saved_query_rest_required_fields( + request_type=asset_service.DeleteSavedQueryRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_saved_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_saved_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = AssetServiceClient( 
credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = None # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = '' + json_return_value = "" - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_saved_query(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_delete_saved_query_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.delete_saved_query._get_unset_required_fields({}) - assert 
set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_delete_saved_query_rest_flattened(): @@ -12178,24 +13472,24 @@ def test_delete_saved_query_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = None # get arguments that satisfy an http rule for this method - sample_request = {'name': 'sample1/sample2/savedQueries/sample3'} + sample_request = {"name": "sample1/sample2/savedQueries/sample3"} # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -12205,10 +13499,12 @@ def test_delete_saved_query_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=*/*/savedQueries/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=*/*/savedQueries/*}" % client.transport._host, args[1] + ) -def test_delete_saved_query_rest_flattened_error(transport: str = 'rest'): +def test_delete_saved_query_rest_flattened_error(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12219,7 +13515,7 @@ def test_delete_saved_query_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.delete_saved_query( asset_service.DeleteSavedQueryRequest(), - name='name_value', + name="name_value", ) @@ -12237,12 +13533,19 @@ def test_batch_get_effective_iam_policies_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.batch_get_effective_iam_policies in client._transport._wrapped_methods + assert ( + client._transport.batch_get_effective_iam_policies + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.batch_get_effective_iam_policies] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.batch_get_effective_iam_policies + ] = mock_rpc request = {} client.batch_get_effective_iam_policies(request) @@ -12257,7 +13560,9 @@ def test_batch_get_effective_iam_policies_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_batch_get_effective_iam_policies_rest_required_fields(request_type=asset_service.BatchGetEffectiveIamPoliciesRequest): +def test_batch_get_effective_iam_policies_rest_required_fields( + request_type=asset_service.BatchGetEffectiveIamPoliciesRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} @@ -12265,56 +13570,59 @@ def test_batch_get_effective_iam_policies_rest_required_fields(request_type=asse request_init["names"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "names" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_effective_iam_policies._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_get_effective_iam_policies._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present assert "names" in jsonified_request assert jsonified_request["names"] == request_init["names"] - jsonified_request["scope"] = 'scope_value' - jsonified_request["names"] = 'names_value' + jsonified_request["scope"] = "scope_value" + jsonified_request["names"] = "names_value" - unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_effective_iam_policies._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_get_effective_iam_policies._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("names", )) + assert not set(unset_fields) - set(("names",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "scope" in jsonified_request - assert jsonified_request["scope"] == 'scope_value' + assert jsonified_request["scope"] == "scope_value" assert "names" in jsonified_request - assert jsonified_request["names"] == 'names_value' + assert jsonified_request["names"] == "names_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.BatchGetEffectiveIamPoliciesResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -12322,10 +13630,12 @@ def test_batch_get_effective_iam_policies_rest_required_fields(request_type=asse response_value.status_code = 200 # Convert return value to protobuf type - return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.pb(return_value) + return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -12337,15 +13647,27 @@ def test_batch_get_effective_iam_policies_rest_required_fields(request_type=asse "", ), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_batch_get_effective_iam_policies_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - unset_fields = transport.batch_get_effective_iam_policies._get_unset_required_fields({}) - assert set(unset_fields) == (set(("names", )) & set(("scope", "names", ))) + unset_fields = ( + transport.batch_get_effective_iam_policies._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set(("names",)) + & set( + ( + "scope", + "names", + ) + ) + ) def test_analyze_org_policies_rest_use_cached_wrapped_rpc(): @@ -12362,12 +13684,18 @@ def test_analyze_org_policies_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been 
cached - assert client._transport.analyze_org_policies in client._transport._wrapped_methods + assert ( + client._transport.analyze_org_policies in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.analyze_org_policies] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.analyze_org_policies] = ( + mock_rpc + ) request = {} client.analyze_org_policies(request) @@ -12382,7 +13710,9 @@ def test_analyze_org_policies_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_analyze_org_policies_rest_required_fields(request_type=asset_service.AnalyzeOrgPoliciesRequest): +def test_analyze_org_policies_rest_required_fields( + request_type=asset_service.AnalyzeOrgPoliciesRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} @@ -12390,56 +13720,66 @@ def test_analyze_org_policies_rest_required_fields(request_type=asset_service.An request_init["constraint"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "constraint" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policies._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_org_policies._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify 
required fields with default values are now present assert "constraint" in jsonified_request assert jsonified_request["constraint"] == request_init["constraint"] - jsonified_request["scope"] = 'scope_value' - jsonified_request["constraint"] = 'constraint_value' + jsonified_request["scope"] = "scope_value" + jsonified_request["constraint"] = "constraint_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policies._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_org_policies._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("constraint", "filter", "page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "constraint", + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "scope" in jsonified_request - assert jsonified_request["scope"] == 'scope_value' + assert jsonified_request["scope"] == "scope_value" assert "constraint" in jsonified_request - assert jsonified_request["constraint"] == 'constraint_value' + assert jsonified_request["constraint"] == "constraint_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.AnalyzeOrgPoliciesResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -12450,7 +13790,7 @@ def test_analyze_org_policies_rest_required_fields(request_type=asset_service.An return_value = asset_service.AnalyzeOrgPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -12462,15 +13802,32 @@ def test_analyze_org_policies_rest_required_fields(request_type=asset_service.An "", ), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_analyze_org_policies_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.analyze_org_policies._get_unset_required_fields({}) - assert set(unset_fields) == (set(("constraint", "filter", "pageSize", "pageToken", )) & set(("scope", "constraint", ))) + assert set(unset_fields) == ( + set( + ( + "constraint", + "filter", + "pageSize", + "pageToken", + ) + ) + & set( + ( + "scope", + "constraint", + ) + ) + ) def test_analyze_org_policies_rest_flattened(): @@ -12480,18 +13837,18 @@ def test_analyze_org_policies_rest_flattened(): ) # Mock the http request 
call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.AnalyzeOrgPoliciesResponse() # get arguments that satisfy an http rule for this method - sample_request = {'scope': 'sample1/sample2'} + sample_request = {"scope": "sample1/sample2"} # get truthy value for each flattened field mock_args = dict( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) mock_args.update(sample_request) @@ -12501,7 +13858,7 @@ def test_analyze_org_policies_rest_flattened(): # Convert return value to protobuf type return_value = asset_service.AnalyzeOrgPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -12511,10 +13868,12 @@ def test_analyze_org_policies_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{scope=*/*}:analyzeOrgPolicies" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{scope=*/*}:analyzeOrgPolicies" % client.transport._host, args[1] + ) -def test_analyze_org_policies_rest_flattened_error(transport: str = 'rest'): +def test_analyze_org_policies_rest_flattened_error(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12525,22 +13884,22 @@ def test_analyze_org_policies_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.analyze_org_policies( asset_service.AnalyzeOrgPoliciesRequest(), - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) -def test_analyze_org_policies_rest_pager(transport: str = 'rest'): +def test_analyze_org_policies_rest_pager(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( asset_service.AnalyzeOrgPoliciesResponse( @@ -12549,17 +13908,17 @@ def test_analyze_org_policies_rest_pager(transport: str = 'rest'): asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.AnalyzeOrgPoliciesResponse( org_policy_results=[], - next_page_token='def', + next_page_token="def", ), asset_service.AnalyzeOrgPoliciesResponse( org_policy_results=[ asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.AnalyzeOrgPoliciesResponse( org_policy_results=[ @@ -12572,24 +13931,28 @@ def test_analyze_org_policies_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(asset_service.AnalyzeOrgPoliciesResponse.to_json(x) for x in response) + response = tuple( + asset_service.AnalyzeOrgPoliciesResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'scope': 'sample1/sample2'} + sample_request = {"scope": "sample1/sample2"} pager = client.analyze_org_policies(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult) - for i in results) + assert all( + isinstance(i, asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult) + for i in results + ) pages = list(client.analyze_org_policies(request=sample_request).pages) - for page_, 
token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -12607,12 +13970,19 @@ def test_analyze_org_policy_governed_containers_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.analyze_org_policy_governed_containers in client._transport._wrapped_methods + assert ( + client._transport.analyze_org_policy_governed_containers + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.analyze_org_policy_governed_containers] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.analyze_org_policy_governed_containers + ] = mock_rpc request = {} client.analyze_org_policy_governed_containers(request) @@ -12627,7 +13997,9 @@ def test_analyze_org_policy_governed_containers_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_analyze_org_policy_governed_containers_rest_required_fields(request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest): +def test_analyze_org_policy_governed_containers_rest_required_fields( + request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} @@ -12635,56 +14007,70 @@ def test_analyze_org_policy_governed_containers_rest_required_fields(request_typ request_init["constraint"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, 
use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "constraint" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policy_governed_containers._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_org_policy_governed_containers._get_unset_required_fields( + jsonified_request + ) jsonified_request.update(unset_fields) # verify required fields with default values are now present assert "constraint" in jsonified_request assert jsonified_request["constraint"] == request_init["constraint"] - jsonified_request["scope"] = 'scope_value' - jsonified_request["constraint"] = 'constraint_value' + jsonified_request["scope"] = "scope_value" + jsonified_request["constraint"] = "constraint_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policy_governed_containers._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_org_policy_governed_containers._get_unset_required_fields( + jsonified_request + ) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("constraint", "filter", "page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "constraint", + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "scope" in jsonified_request - assert jsonified_request["scope"] == 'scope_value' + assert jsonified_request["scope"] == "scope_value" assert "constraint" in jsonified_request - assert jsonified_request["constraint"] == 'constraint_value' + assert jsonified_request["constraint"] == "constraint_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -12692,10 +14078,12 @@ def test_analyze_org_policy_governed_containers_rest_required_fields(request_typ response_value.status_code = 200 # Convert return value to protobuf type - return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(return_value) + return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -12707,15 +14095,34 @@ def test_analyze_org_policy_governed_containers_rest_required_fields(request_typ "", ), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_analyze_org_policy_governed_containers_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - unset_fields = transport.analyze_org_policy_governed_containers._get_unset_required_fields({}) - assert set(unset_fields) == (set(("constraint", "filter", "pageSize", "pageToken", )) & set(("scope", "constraint", ))) + unset_fields = ( + transport.analyze_org_policy_governed_containers._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "constraint", + "filter", + "pageSize", + "pageToken", + ) + ) + & set( + ( + "scope", + "constraint", + ) + ) + ) def 
test_analyze_org_policy_governed_containers_rest_flattened(): @@ -12725,18 +14132,18 @@ def test_analyze_org_policy_governed_containers_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() # get arguments that satisfy an http rule for this method - sample_request = {'scope': 'sample1/sample2'} + sample_request = {"scope": "sample1/sample2"} # get truthy value for each flattened field mock_args = dict( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) mock_args.update(sample_request) @@ -12744,9 +14151,11 @@ def test_analyze_org_policy_governed_containers_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(return_value) + return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -12756,10 +14165,16 @@ def test_analyze_org_policy_governed_containers_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{scope=*/*}:analyzeOrgPolicyGovernedContainers" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{scope=*/*}:analyzeOrgPolicyGovernedContainers" + % client.transport._host, + args[1], + ) -def test_analyze_org_policy_governed_containers_rest_flattened_error(transport: str = 'rest'): +def test_analyze_org_policy_governed_containers_rest_flattened_error( + transport: str = "rest", +): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12770,22 +14185,22 @@ def test_analyze_org_policy_governed_containers_rest_flattened_error(transport: with pytest.raises(ValueError): client.analyze_org_policy_governed_containers( asset_service.AnalyzeOrgPolicyGovernedContainersRequest(), - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) -def test_analyze_org_policy_governed_containers_rest_pager(transport: str = 'rest'): +def test_analyze_org_policy_governed_containers_rest_pager(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( asset_service.AnalyzeOrgPolicyGovernedContainersResponse( @@ -12794,17 +14209,17 @@ def test_analyze_org_policy_governed_containers_rest_pager(transport: str = 'res asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.AnalyzeOrgPolicyGovernedContainersResponse( governed_containers=[], - next_page_token='def', + next_page_token="def", ), asset_service.AnalyzeOrgPolicyGovernedContainersResponse( governed_containers=[ asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.AnalyzeOrgPolicyGovernedContainersResponse( governed_containers=[ @@ -12817,24 +14232,34 @@ def test_analyze_org_policy_governed_containers_rest_pager(transport: str = 'res response = response + response # Wrap the values into proper Response objs - response = tuple(asset_service.AnalyzeOrgPolicyGovernedContainersResponse.to_json(x) for x in response) + response = tuple( + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.to_json(x) + for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'scope': 'sample1/sample2'} + sample_request = {"scope": "sample1/sample2"} pager = client.analyze_org_policy_governed_containers(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, 
asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer) - for i in results) + assert all( + isinstance( + i, + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer, + ) + for i in results + ) - pages = list(client.analyze_org_policy_governed_containers(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + pages = list( + client.analyze_org_policy_governed_containers(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -12852,12 +14277,19 @@ def test_analyze_org_policy_governed_assets_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.analyze_org_policy_governed_assets in client._transport._wrapped_methods + assert ( + client._transport.analyze_org_policy_governed_assets + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.analyze_org_policy_governed_assets] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.analyze_org_policy_governed_assets + ] = mock_rpc request = {} client.analyze_org_policy_governed_assets(request) @@ -12872,7 +14304,9 @@ def test_analyze_org_policy_governed_assets_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_analyze_org_policy_governed_assets_rest_required_fields(request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): +def test_analyze_org_policy_governed_assets_rest_required_fields( + request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, +): transport_class = transports.AssetServiceRestTransport request_init = {} @@ -12880,56 +14314,66 @@ def test_analyze_org_policy_governed_assets_rest_required_fields(request_type=as request_init["constraint"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "constraint" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policy_governed_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_org_policy_governed_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present assert "constraint" in jsonified_request assert jsonified_request["constraint"] == request_init["constraint"] - jsonified_request["scope"] = 'scope_value' - jsonified_request["constraint"] = 'constraint_value' + jsonified_request["scope"] = "scope_value" + jsonified_request["constraint"] = "constraint_value" - unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policy_governed_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_org_policy_governed_assets._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("constraint", "filter", "page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "constraint", + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "scope" in jsonified_request - assert jsonified_request["scope"] == 'scope_value' + assert jsonified_request["scope"] == "scope_value" assert "constraint" in jsonified_request - assert jsonified_request["constraint"] == 'constraint_value' + assert jsonified_request["constraint"] == "constraint_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -12937,10 +14381,12 @@ def test_analyze_org_policy_governed_assets_rest_required_fields(request_type=as response_value.status_code = 200 # Convert return value to protobuf type - return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(return_value) + return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -12952,15 +14398,34 @@ def test_analyze_org_policy_governed_assets_rest_required_fields(request_type=as "", ), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_analyze_org_policy_governed_assets_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - unset_fields = transport.analyze_org_policy_governed_assets._get_unset_required_fields({}) - assert set(unset_fields) == (set(("constraint", "filter", "pageSize", "pageToken", )) & set(("scope", "constraint", ))) + unset_fields = ( + transport.analyze_org_policy_governed_assets._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "constraint", + "filter", + "pageSize", + "pageToken", + ) + ) + & set( + ( + "scope", + "constraint", + ) + ) + ) def test_analyze_org_policy_governed_assets_rest_flattened(): @@ -12970,18 
+14435,18 @@ def test_analyze_org_policy_governed_assets_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() # get arguments that satisfy an http rule for this method - sample_request = {'scope': 'sample1/sample2'} + sample_request = {"scope": "sample1/sample2"} # get truthy value for each flattened field mock_args = dict( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) mock_args.update(sample_request) @@ -12989,9 +14454,11 @@ def test_analyze_org_policy_governed_assets_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(return_value) + return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -13001,10 +14468,15 @@ def test_analyze_org_policy_governed_assets_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{scope=*/*}:analyzeOrgPolicyGovernedAssets" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{scope=*/*}:analyzeOrgPolicyGovernedAssets" % client.transport._host, + args[1], + ) -def test_analyze_org_policy_governed_assets_rest_flattened_error(transport: str = 'rest'): +def test_analyze_org_policy_governed_assets_rest_flattened_error( + transport: str = "rest", +): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13015,22 +14487,22 @@ def test_analyze_org_policy_governed_assets_rest_flattened_error(transport: str with pytest.raises(ValueError): client.analyze_org_policy_governed_assets( asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(), - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) -def test_analyze_org_policy_governed_assets_rest_pager(transport: str = 'rest'): +def test_analyze_org_policy_governed_assets_rest_pager(transport: str = "rest"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( @@ -13039,17 +14511,17 @@ def test_analyze_org_policy_governed_assets_rest_pager(transport: str = 'rest'): asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), ], - next_page_token='abc', + next_page_token="abc", ), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( governed_assets=[], - next_page_token='def', + next_page_token="def", ), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( governed_assets=[ asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), ], - next_page_token='ghi', + next_page_token="ghi", ), asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( governed_assets=[ @@ -13062,24 +14534,33 @@ def test_analyze_org_policy_governed_assets_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.to_json(x) for x in response) + response = tuple( + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.to_json(x) + for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'scope': 'sample1/sample2'} + sample_request = {"scope": "sample1/sample2"} pager = client.analyze_org_policy_governed_assets(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset) - for i in results) + assert all( + isinstance( + i, 
asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset + ) + for i in results + ) - pages = list(client.analyze_org_policy_governed_assets(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + pages = list( + client.analyze_org_policy_governed_assets(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -13121,8 +14602,7 @@ def test_credentials_transport_error(): options.api_key = "api_key" with pytest.raises(ValueError): client = AssetServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() + client_options=options, credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. @@ -13144,6 +14624,7 @@ def test_transport_instance(): client = AssetServiceClient(transport=transport) assert client.transport is transport + def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.AssetServiceGrpcTransport( @@ -13158,18 +14639,23 @@ def test_transport_get_channel(): channel = transport.grpc_channel assert channel -@pytest.mark.parametrize("transport_class", [ - transports.AssetServiceGrpcTransport, - transports.AssetServiceGrpcAsyncIOTransport, - transports.AssetServiceRestTransport, -]) + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AssetServiceGrpcTransport, + transports.AssetServiceGrpcAsyncIOTransport, + transports.AssetServiceRestTransport, + ], +) def test_transport_adc(transport_class): # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() + def test_transport_kind_grpc(): transport = AssetServiceClient.get_transport_class("grpc")( credentials=ga_credentials.AnonymousCredentials() @@ -13179,8 +14665,7 @@ def test_transport_kind_grpc(): def test_initialize_client_w_grpc(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" ) assert client is not None @@ -13194,10 +14679,8 @@ def test_export_assets_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_assets), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.export_assets), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.export_assets(request=None) # Establish that the underlying stub method was called. @@ -13217,9 +14700,7 @@ def test_list_assets_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_assets), "__call__") as call: call.return_value = asset_service.ListAssetsResponse() client.list_assets(request=None) @@ -13241,8 +14722,8 @@ def test_batch_get_assets_history_empty_call_grpc(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.batch_get_assets_history), - '__call__') as call: + type(client.transport.batch_get_assets_history), "__call__" + ) as call: call.return_value = asset_service.BatchGetAssetsHistoryResponse() client.batch_get_assets_history(request=None) @@ -13263,9 +14744,7 @@ def test_create_feed_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.create_feed), "__call__") as call: call.return_value = asset_service.Feed() client.create_feed(request=None) @@ -13286,9 +14765,7 @@ def test_get_feed_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.get_feed), "__call__") as call: call.return_value = asset_service.Feed() client.get_feed(request=None) @@ -13309,9 +14786,7 @@ def test_list_feeds_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: + with mock.patch.object(type(client.transport.list_feeds), "__call__") as call: call.return_value = asset_service.ListFeedsResponse() client.list_feeds(request=None) @@ -13332,9 +14807,7 @@ def test_update_feed_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.update_feed), "__call__") as call: call.return_value = asset_service.Feed() client.update_feed(request=None) @@ -13355,9 +14828,7 @@ def test_delete_feed_empty_call_grpc(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_feed), "__call__") as call: call.return_value = None client.delete_feed(request=None) @@ -13379,8 +14850,8 @@ def test_search_all_resources_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: + type(client.transport.search_all_resources), "__call__" + ) as call: call.return_value = asset_service.SearchAllResourcesResponse() client.search_all_resources(request=None) @@ -13402,8 +14873,8 @@ def test_search_all_iam_policies_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: + type(client.transport.search_all_iam_policies), "__call__" + ) as call: call.return_value = asset_service.SearchAllIamPoliciesResponse() client.search_all_iam_policies(request=None) @@ -13425,8 +14896,8 @@ def test_analyze_iam_policy_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.analyze_iam_policy), - '__call__') as call: + type(client.transport.analyze_iam_policy), "__call__" + ) as call: call.return_value = asset_service.AnalyzeIamPolicyResponse() client.analyze_iam_policy(request=None) @@ -13448,9 +14919,9 @@ def test_analyze_iam_policy_longrunning_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.analyze_iam_policy_longrunning), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + type(client.transport.analyze_iam_policy_longrunning), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.analyze_iam_policy_longrunning(request=None) # Establish that the underlying stub method was called. 
@@ -13470,9 +14941,7 @@ def test_analyze_move_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.analyze_move), - '__call__') as call: + with mock.patch.object(type(client.transport.analyze_move), "__call__") as call: call.return_value = asset_service.AnalyzeMoveResponse() client.analyze_move(request=None) @@ -13493,9 +14962,7 @@ def test_query_assets_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.query_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.query_assets), "__call__") as call: call.return_value = asset_service.QueryAssetsResponse() client.query_assets(request=None) @@ -13517,8 +14984,8 @@ def test_create_saved_query_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: + type(client.transport.create_saved_query), "__call__" + ) as call: call.return_value = asset_service.SavedQuery() client.create_saved_query(request=None) @@ -13539,9 +15006,7 @@ def test_get_saved_query_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: + with mock.patch.object(type(client.transport.get_saved_query), "__call__") as call: call.return_value = asset_service.SavedQuery() client.get_saved_query(request=None) @@ -13563,8 +15028,8 @@ def test_list_saved_queries_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: + type(client.transport.list_saved_queries), "__call__" + ) as call: call.return_value = asset_service.ListSavedQueriesResponse() client.list_saved_queries(request=None) @@ -13586,8 +15051,8 @@ def test_update_saved_query_empty_call_grpc(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.update_saved_query), - '__call__') as call: + type(client.transport.update_saved_query), "__call__" + ) as call: call.return_value = asset_service.SavedQuery() client.update_saved_query(request=None) @@ -13609,8 +15074,8 @@ def test_delete_saved_query_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: + type(client.transport.delete_saved_query), "__call__" + ) as call: call.return_value = None client.delete_saved_query(request=None) @@ -13632,8 +15097,8 @@ def test_batch_get_effective_iam_policies_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.batch_get_effective_iam_policies), - '__call__') as call: + type(client.transport.batch_get_effective_iam_policies), "__call__" + ) as call: call.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse() client.batch_get_effective_iam_policies(request=None) @@ -13655,8 +15120,8 @@ def test_analyze_org_policies_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: + type(client.transport.analyze_org_policies), "__call__" + ) as call: call.return_value = asset_service.AnalyzeOrgPoliciesResponse() client.analyze_org_policies(request=None) @@ -13678,8 +15143,8 @@ def test_analyze_org_policy_governed_containers_empty_call_grpc(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_containers), "__call__" + ) as call: call.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() client.analyze_org_policy_governed_containers(request=None) @@ -13701,8 +15166,8 @@ def test_analyze_org_policy_governed_assets_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_assets), "__call__" + ) as call: call.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() client.analyze_org_policy_governed_assets(request=None) @@ -13723,8 +15188,7 @@ def test_transport_kind_grpc_asyncio(): def test_initialize_client_w_grpc_asyncio(): client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" + credentials=async_anonymous_credentials(), transport="grpc_asyncio" ) assert client is not None @@ -13739,12 +15203,10 @@ async def test_export_assets_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.export_assets), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.export_assets(request=None) @@ -13766,13 +15228,13 @@ async def test_list_assets_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_assets), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListAssetsResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.ListAssetsResponse( + next_page_token="next_page_token_value", + ) + ) await client.list_assets(request=None) # Establish that the underlying stub method was called. @@ -13794,11 +15256,12 @@ async def test_batch_get_assets_history_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.batch_get_assets_history), - '__call__') as call: + type(client.transport.batch_get_assets_history), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetAssetsHistoryResponse( - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.BatchGetAssetsHistoryResponse() + ) await client.batch_get_assets_history(request=None) # Establish that the underlying stub method was called. @@ -13819,17 +15282,17 @@ async def test_create_feed_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.create_feed), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.Feed( + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=["relationship_types_value"], + ) + ) await client.create_feed(request=None) # Establish that the underlying stub method was called. @@ -13850,17 +15313,17 @@ async def test_get_feed_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.get_feed), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.Feed( + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=["relationship_types_value"], + ) + ) await client.get_feed(request=None) # Establish that the underlying stub method was called. @@ -13881,12 +15344,11 @@ async def test_list_feeds_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: + with mock.patch.object(type(client.transport.list_feeds), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListFeedsResponse( - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.ListFeedsResponse() + ) await client.list_feeds(request=None) # Establish that the underlying stub method was called. @@ -13907,17 +15369,17 @@ async def test_update_feed_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.update_feed), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.Feed( + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=["relationship_types_value"], + ) + ) await client.update_feed(request=None) # Establish that the underlying stub method was called. @@ -13938,9 +15400,7 @@ async def test_delete_feed_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_feed), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_feed(request=None) @@ -13964,12 +15424,14 @@ async def test_search_all_resources_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: + type(client.transport.search_all_resources), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllResourcesResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SearchAllResourcesResponse( + next_page_token="next_page_token_value", + ) + ) await client.search_all_resources(request=None) # Establish that the underlying stub method was called. @@ -13991,12 +15453,14 @@ async def test_search_all_iam_policies_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: + type(client.transport.search_all_iam_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllIamPoliciesResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SearchAllIamPoliciesResponse( + next_page_token="next_page_token_value", + ) + ) await client.search_all_iam_policies(request=None) # Establish that the underlying stub method was called. @@ -14018,12 +15482,14 @@ async def test_analyze_iam_policy_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.analyze_iam_policy), - '__call__') as call: + type(client.transport.analyze_iam_policy), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeIamPolicyResponse( - fully_explored=True, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeIamPolicyResponse( + fully_explored=True, + ) + ) await client.analyze_iam_policy(request=None) # Establish that the underlying stub method was called. @@ -14045,11 +15511,11 @@ async def test_analyze_iam_policy_longrunning_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.analyze_iam_policy_longrunning), - '__call__') as call: + type(client.transport.analyze_iam_policy_longrunning), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.analyze_iam_policy_longrunning(request=None) @@ -14071,12 +15537,11 @@ async def test_analyze_move_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.analyze_move), - '__call__') as call: + with mock.patch.object(type(client.transport.analyze_move), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeMoveResponse( - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeMoveResponse() + ) await client.analyze_move(request=None) # Establish that the underlying stub method was called. @@ -14097,14 +15562,14 @@ async def test_query_assets_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.query_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.query_assets), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.QueryAssetsResponse( - job_reference='job_reference_value', - done=True, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.QueryAssetsResponse( + job_reference="job_reference_value", + done=True, + ) + ) await client.query_assets(request=None) # Establish that the underlying stub method was called. @@ -14126,15 +15591,17 @@ async def test_create_saved_query_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: + type(client.transport.create_saved_query), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SavedQuery( + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", + ) + ) await client.create_saved_query(request=None) # Establish that the underlying stub method was called. @@ -14155,16 +15622,16 @@ async def test_get_saved_query_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: + with mock.patch.object(type(client.transport.get_saved_query), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SavedQuery( + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", + ) + ) await client.get_saved_query(request=None) # Establish that the underlying stub method was called. @@ -14186,12 +15653,14 @@ async def test_list_saved_queries_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: + type(client.transport.list_saved_queries), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListSavedQueriesResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.ListSavedQueriesResponse( + next_page_token="next_page_token_value", + ) + ) await client.list_saved_queries(request=None) # Establish that the underlying stub method was called. @@ -14213,15 +15682,17 @@ async def test_update_saved_query_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_saved_query), - '__call__') as call: + type(client.transport.update_saved_query), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.SavedQuery( + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", + ) + ) await client.update_saved_query(request=None) # Establish that the underlying stub method was called. @@ -14243,8 +15714,8 @@ async def test_delete_saved_query_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: + type(client.transport.delete_saved_query), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_saved_query(request=None) @@ -14268,11 +15739,12 @@ async def test_batch_get_effective_iam_policies_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.batch_get_effective_iam_policies), - '__call__') as call: + type(client.transport.batch_get_effective_iam_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetEffectiveIamPoliciesResponse( - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.BatchGetEffectiveIamPoliciesResponse() + ) await client.batch_get_effective_iam_policies(request=None) # Establish that the underlying stub method was called. @@ -14294,12 +15766,14 @@ async def test_analyze_org_policies_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: + type(client.transport.analyze_org_policies), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPoliciesResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeOrgPoliciesResponse( + next_page_token="next_page_token_value", + ) + ) await client.analyze_org_policies(request=None) # Establish that the underlying stub method was called. @@ -14321,12 +15795,14 @@ async def test_analyze_org_policy_governed_containers_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_containers), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + next_page_token="next_page_token_value", + ) + ) await client.analyze_org_policy_governed_containers(request=None) # Establish that the underlying stub method was called. @@ -14348,12 +15824,14 @@ async def test_analyze_org_policy_governed_assets_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_assets), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + next_page_token="next_page_token_value", + ) + ) await client.analyze_org_policy_governed_assets(request=None) # Establish that the underlying stub method was called. @@ -14373,18 +15851,20 @@ def test_transport_kind_rest(): def test_export_assets_rest_bad_request(request_type=asset_service.ExportAssetsRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {"parent": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -14393,30 +15873,32 @@ def test_export_assets_rest_bad_request(request_type=asset_service.ExportAssetsR client.export_assets(request) -@pytest.mark.parametrize("request_type", [ - asset_service.ExportAssetsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.ExportAssetsRequest, + dict, + ], +) def test_export_assets_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {"parent": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.export_assets(request) @@ -14429,20 +15911,32 @@ def test_export_assets_rest_call_success(request_type): def test_export_assets_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_export_assets") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_export_assets_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_export_assets") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.AssetServiceRestInterceptor, "post_export_assets" + ) as post, + mock.patch.object( + transports.AssetServiceRestInterceptor, "post_export_assets_with_metadata" + ) as 
post_with_metadata, + mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_export_assets" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.ExportAssetsRequest.pb(asset_service.ExportAssetsRequest()) + pb_message = asset_service.ExportAssetsRequest.pb( + asset_service.ExportAssetsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -14457,7 +15951,7 @@ def test_export_assets_rest_interceptors(null_interceptor): req.return_value.content = return_value request = asset_service.ExportAssetsRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -14465,7 +15959,13 @@ def test_export_assets_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.export_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.export_assets( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -14474,18 +15974,20 @@ def test_export_assets_rest_interceptors(null_interceptor): def test_list_assets_rest_bad_request(request_type=asset_service.ListAssetsRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {"parent": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -14494,25 +15996,27 @@ def test_list_assets_rest_bad_request(request_type=asset_service.ListAssetsReque client.list_assets(request) -@pytest.mark.parametrize("request_type", [ - asset_service.ListAssetsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.ListAssetsRequest, + dict, + ], +) def test_list_assets_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {"parent": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = asset_service.ListAssetsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -14522,33 +16026,45 @@ def test_list_assets_rest_call_success(request_type): # Convert return value to protobuf type return_value = asset_service.ListAssetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_assets(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListAssetsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_assets_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_assets") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_assets_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_assets") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + 
transports.AssetServiceRestInterceptor, "post_list_assets" + ) as post, + mock.patch.object( + transports.AssetServiceRestInterceptor, "post_list_assets_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_list_assets" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.ListAssetsRequest.pb(asset_service.ListAssetsRequest()) + pb_message = asset_service.ListAssetsRequest.pb( + asset_service.ListAssetsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -14559,11 +16075,13 @@ def test_list_assets_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.ListAssetsResponse.to_json(asset_service.ListAssetsResponse()) + return_value = asset_service.ListAssetsResponse.to_json( + asset_service.ListAssetsResponse() + ) req.return_value.content = return_value request = asset_service.ListAssetsRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -14571,27 +16089,37 @@ def test_list_assets_rest_interceptors(null_interceptor): post.return_value = asset_service.ListAssetsResponse() post_with_metadata.return_value = asset_service.ListAssetsResponse(), metadata - client.list_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_assets( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_batch_get_assets_history_rest_bad_request(request_type=asset_service.BatchGetAssetsHistoryRequest): +def test_batch_get_assets_history_rest_bad_request( + request_type=asset_service.BatchGetAssetsHistoryRequest, +): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - 
transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {"parent": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -14600,25 +16128,26 @@ def test_batch_get_assets_history_rest_bad_request(request_type=asset_service.Ba client.batch_get_assets_history(request) -@pytest.mark.parametrize("request_type", [ - asset_service.BatchGetAssetsHistoryRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.BatchGetAssetsHistoryRequest, + dict, + ], +) def test_batch_get_assets_history_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {"parent": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = asset_service.BatchGetAssetsHistoryResponse( - ) + return_value = asset_service.BatchGetAssetsHistoryResponse() # Wrap the value into a proper Response obj response_value = mock.Mock() @@ -14627,7 +16156,7 @@ def test_batch_get_assets_history_rest_call_success(request_type): # Convert return value to protobuf type return_value = asset_service.BatchGetAssetsHistoryResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.batch_get_assets_history(request) @@ -14640,19 +16169,32 @@ def test_batch_get_assets_history_rest_call_success(request_type): def test_batch_get_assets_history_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_assets_history") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_assets_history_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_batch_get_assets_history") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.AssetServiceRestInterceptor, "post_batch_get_assets_history" + ) as post, + mock.patch.object( 
+ transports.AssetServiceRestInterceptor, + "post_batch_get_assets_history_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_batch_get_assets_history" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.BatchGetAssetsHistoryRequest.pb(asset_service.BatchGetAssetsHistoryRequest()) + pb_message = asset_service.BatchGetAssetsHistoryRequest.pb( + asset_service.BatchGetAssetsHistoryRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -14663,19 +16205,30 @@ def test_batch_get_assets_history_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.BatchGetAssetsHistoryResponse.to_json(asset_service.BatchGetAssetsHistoryResponse()) + return_value = asset_service.BatchGetAssetsHistoryResponse.to_json( + asset_service.BatchGetAssetsHistoryResponse() + ) req.return_value.content = return_value request = asset_service.BatchGetAssetsHistoryRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = asset_service.BatchGetAssetsHistoryResponse() - post_with_metadata.return_value = asset_service.BatchGetAssetsHistoryResponse(), metadata + post_with_metadata.return_value = ( + asset_service.BatchGetAssetsHistoryResponse(), + metadata, + ) - client.batch_get_assets_history(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.batch_get_assets_history( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -14684,18 +16237,20 @@ def test_batch_get_assets_history_rest_interceptors(null_interceptor): def test_create_feed_rest_bad_request(request_type=asset_service.CreateFeedRequest): client = 
AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {"parent": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -14704,29 +16259,31 @@ def test_create_feed_rest_bad_request(request_type=asset_service.CreateFeedReque client.create_feed(request) -@pytest.mark.parametrize("request_type", [ - asset_service.CreateFeedRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.CreateFeedRequest, + dict, + ], +) def test_create_feed_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {"parent": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=["relationship_types_value"], ) # Wrap the value into a proper Response obj @@ -14736,37 +16293,49 @@ def test_create_feed_rest_call_success(request_type): # Convert return value to protobuf type return_value = asset_service.Feed.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_feed(request) # Establish that the response is the type that we expect. 
assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] + assert response.name == "name_value" + assert response.asset_names == ["asset_names_value"] + assert response.asset_types == ["asset_types_value"] assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] + assert response.relationship_types == ["relationship_types_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) def test_create_feed_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_feed") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_feed_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_create_feed") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.AssetServiceRestInterceptor, "post_create_feed" + ) as post, + mock.patch.object( + transports.AssetServiceRestInterceptor, "post_create_feed_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_create_feed" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - 
pb_message = asset_service.CreateFeedRequest.pb(asset_service.CreateFeedRequest()) + pb_message = asset_service.CreateFeedRequest.pb( + asset_service.CreateFeedRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -14781,7 +16350,7 @@ def test_create_feed_rest_interceptors(null_interceptor): req.return_value.content = return_value request = asset_service.CreateFeedRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -14789,7 +16358,13 @@ def test_create_feed_rest_interceptors(null_interceptor): post.return_value = asset_service.Feed() post_with_metadata.return_value = asset_service.Feed(), metadata - client.create_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.create_feed( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -14798,18 +16373,20 @@ def test_create_feed_rest_interceptors(null_interceptor): def test_get_feed_rest_bad_request(request_type=asset_service.GetFeedRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/feeds/sample3'} + request_init = {"name": "sample1/sample2/feeds/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -14818,29 +16395,31 @@ def test_get_feed_rest_bad_request(request_type=asset_service.GetFeedRequest): client.get_feed(request) -@pytest.mark.parametrize("request_type", [ - asset_service.GetFeedRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.GetFeedRequest, + dict, + ], +) def test_get_feed_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/feeds/sample3'} + request_init = {"name": "sample1/sample2/feeds/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=["relationship_types_value"], ) # Wrap the value into a proper Response obj @@ -14850,33 +16429,43 @@ def test_get_feed_rest_call_success(request_type): # Convert return value to protobuf type return_value = asset_service.Feed.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_feed(request) # Establish that the response is the type that we expect. 
assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] + assert response.name == "name_value" + assert response.asset_names == ["asset_names_value"] + assert response.asset_types == ["asset_types_value"] assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] + assert response.relationship_types == ["relationship_types_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_feed_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_feed") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_feed_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_get_feed") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.AssetServiceRestInterceptor, "post_get_feed" + ) as post, + mock.patch.object( + transports.AssetServiceRestInterceptor, "post_get_feed_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_get_feed" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -14895,7 +16484,7 @@ def 
test_get_feed_rest_interceptors(null_interceptor): req.return_value.content = return_value request = asset_service.GetFeedRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -14903,7 +16492,13 @@ def test_get_feed_rest_interceptors(null_interceptor): post.return_value = asset_service.Feed() post_with_metadata.return_value = asset_service.Feed(), metadata - client.get_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_feed( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -14912,18 +16507,20 @@ def test_get_feed_rest_interceptors(null_interceptor): def test_list_feeds_rest_bad_request(request_type=asset_service.ListFeedsRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {"parent": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -14932,25 +16529,26 @@ def test_list_feeds_rest_bad_request(request_type=asset_service.ListFeedsRequest client.list_feeds(request) -@pytest.mark.parametrize("request_type", [ - asset_service.ListFeedsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.ListFeedsRequest, + dict, + ], +) def test_list_feeds_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {"parent": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = asset_service.ListFeedsResponse( - ) + return_value = asset_service.ListFeedsResponse() # Wrap the value into a proper Response obj response_value = mock.Mock() @@ -14959,7 +16557,7 @@ def test_list_feeds_rest_call_success(request_type): # Convert return value to protobuf type return_value = asset_service.ListFeedsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_feeds(request) @@ -14972,15 +16570,25 @@ def test_list_feeds_rest_call_success(request_type): def test_list_feeds_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_feeds") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_feeds_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_feeds") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.AssetServiceRestInterceptor, "post_list_feeds" + ) as post, + mock.patch.object( + transports.AssetServiceRestInterceptor, "post_list_feeds_with_metadata" + ) as post_with_metadata, + mock.patch.object( + 
transports.AssetServiceRestInterceptor, "pre_list_feeds" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -14995,11 +16603,13 @@ def test_list_feeds_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.ListFeedsResponse.to_json(asset_service.ListFeedsResponse()) + return_value = asset_service.ListFeedsResponse.to_json( + asset_service.ListFeedsResponse() + ) req.return_value.content = return_value request = asset_service.ListFeedsRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -15007,7 +16617,13 @@ def test_list_feeds_rest_interceptors(null_interceptor): post.return_value = asset_service.ListFeedsResponse() post_with_metadata.return_value = asset_service.ListFeedsResponse(), metadata - client.list_feeds(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_feeds( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -15016,18 +16632,20 @@ def test_list_feeds_rest_interceptors(null_interceptor): def test_update_feed_rest_bad_request(request_type=asset_service.UpdateFeedRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} + request_init = {"feed": {"name": "sample1/sample2/feeds/sample3"}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -15036,29 +16654,31 @@ def test_update_feed_rest_bad_request(request_type=asset_service.UpdateFeedReque client.update_feed(request) -@pytest.mark.parametrize("request_type", [ - asset_service.UpdateFeedRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.UpdateFeedRequest, + dict, + ], +) def test_update_feed_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} + request_init = {"feed": {"name": "sample1/sample2/feeds/sample3"}} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=["relationship_types_value"], ) # Wrap the value into a proper Response obj @@ -15068,37 +16688,49 @@ def test_update_feed_rest_call_success(request_type): # Convert return value to protobuf type return_value = asset_service.Feed.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_feed(request) # Establish that the response is the type that we expect. 
assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] + assert response.name == "name_value" + assert response.asset_names == ["asset_names_value"] + assert response.asset_types == ["asset_types_value"] assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] + assert response.relationship_types == ["relationship_types_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) def test_update_feed_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_feed") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_feed_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_update_feed") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.AssetServiceRestInterceptor, "post_update_feed" + ) as post, + mock.patch.object( + transports.AssetServiceRestInterceptor, "post_update_feed_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_update_feed" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - 
pb_message = asset_service.UpdateFeedRequest.pb(asset_service.UpdateFeedRequest()) + pb_message = asset_service.UpdateFeedRequest.pb( + asset_service.UpdateFeedRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -15113,7 +16745,7 @@ def test_update_feed_rest_interceptors(null_interceptor): req.return_value.content = return_value request = asset_service.UpdateFeedRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -15121,7 +16753,13 @@ def test_update_feed_rest_interceptors(null_interceptor): post.return_value = asset_service.Feed() post_with_metadata.return_value = asset_service.Feed(), metadata - client.update_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.update_feed( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -15130,18 +16768,20 @@ def test_update_feed_rest_interceptors(null_interceptor): def test_delete_feed_rest_bad_request(request_type=asset_service.DeleteFeedRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/feeds/sample3'} + request_init = {"name": "sample1/sample2/feeds/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -15150,30 +16790,32 @@ def test_delete_feed_rest_bad_request(request_type=asset_service.DeleteFeedReque client.delete_feed(request) -@pytest.mark.parametrize("request_type", [ - asset_service.DeleteFeedRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.DeleteFeedRequest, + dict, + ], +) def test_delete_feed_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/feeds/sample3'} + request_init = {"name": "sample1/sample2/feeds/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_feed(request) @@ -15186,15 +16828,23 @@ def test_delete_feed_rest_call_success(request_type): def test_delete_feed_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_delete_feed") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_delete_feed" + ) as pre, + ): pre.assert_not_called() - pb_message = asset_service.DeleteFeedRequest.pb(asset_service.DeleteFeedRequest()) + pb_message = asset_service.DeleteFeedRequest.pb( + asset_service.DeleteFeedRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -15207,31 +16857,41 @@ def test_delete_feed_rest_interceptors(null_interceptor): req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} request = asset_service.DeleteFeedRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - client.delete_feed(request, 
metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.delete_feed( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() -def test_search_all_resources_rest_bad_request(request_type=asset_service.SearchAllResourcesRequest): +def test_search_all_resources_rest_bad_request( + request_type=asset_service.SearchAllResourcesRequest, +): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} + request_init = {"scope": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -15240,25 +16900,27 @@ def test_search_all_resources_rest_bad_request(request_type=asset_service.Search client.search_all_resources(request) -@pytest.mark.parametrize("request_type", [ - asset_service.SearchAllResourcesRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.SearchAllResourcesRequest, + dict, + ], +) def test_search_all_resources_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} + request_init = {"scope": "sample1/sample2"} request = 
request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.SearchAllResourcesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -15268,33 +16930,46 @@ def test_search_all_resources_rest_call_success(request_type): # Convert return value to protobuf type return_value = asset_service.SearchAllResourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.search_all_resources(request) # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.SearchAllResourcesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_search_all_resources_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_resources") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_resources_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_search_all_resources") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.AssetServiceRestInterceptor, "post_search_all_resources" + ) as post, + mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_search_all_resources_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_search_all_resources" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.SearchAllResourcesRequest.pb(asset_service.SearchAllResourcesRequest()) + pb_message = asset_service.SearchAllResourcesRequest.pb( + asset_service.SearchAllResourcesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -15305,39 +16980,54 @@ def 
test_search_all_resources_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.SearchAllResourcesResponse.to_json(asset_service.SearchAllResourcesResponse()) + return_value = asset_service.SearchAllResourcesResponse.to_json( + asset_service.SearchAllResourcesResponse() + ) req.return_value.content = return_value request = asset_service.SearchAllResourcesRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = asset_service.SearchAllResourcesResponse() - post_with_metadata.return_value = asset_service.SearchAllResourcesResponse(), metadata + post_with_metadata.return_value = ( + asset_service.SearchAllResourcesResponse(), + metadata, + ) - client.search_all_resources(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.search_all_resources( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_search_all_iam_policies_rest_bad_request(request_type=asset_service.SearchAllIamPoliciesRequest): +def test_search_all_iam_policies_rest_bad_request( + request_type=asset_service.SearchAllIamPoliciesRequest, +): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} + request_init = {"scope": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -15346,25 +17036,27 @@ def test_search_all_iam_policies_rest_bad_request(request_type=asset_service.Sea client.search_all_iam_policies(request) -@pytest.mark.parametrize("request_type", [ - asset_service.SearchAllIamPoliciesRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.SearchAllIamPoliciesRequest, + dict, + ], +) def test_search_all_iam_policies_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} + request_init = {"scope": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = asset_service.SearchAllIamPoliciesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -15374,33 +17066,46 @@ def test_search_all_iam_policies_rest_call_success(request_type): # Convert return value to protobuf type return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.search_all_iam_policies(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.SearchAllIamPoliciesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_search_all_iam_policies_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_iam_policies") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_iam_policies_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_search_all_iam_policies") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as 
req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.AssetServiceRestInterceptor, "post_search_all_iam_policies" + ) as post, + mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_search_all_iam_policies_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_search_all_iam_policies" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.SearchAllIamPoliciesRequest.pb(asset_service.SearchAllIamPoliciesRequest()) + pb_message = asset_service.SearchAllIamPoliciesRequest.pb( + asset_service.SearchAllIamPoliciesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -15411,39 +17116,54 @@ def test_search_all_iam_policies_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.SearchAllIamPoliciesResponse.to_json(asset_service.SearchAllIamPoliciesResponse()) + return_value = asset_service.SearchAllIamPoliciesResponse.to_json( + asset_service.SearchAllIamPoliciesResponse() + ) req.return_value.content = return_value request = asset_service.SearchAllIamPoliciesRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = asset_service.SearchAllIamPoliciesResponse() - post_with_metadata.return_value = asset_service.SearchAllIamPoliciesResponse(), metadata + post_with_metadata.return_value = ( + asset_service.SearchAllIamPoliciesResponse(), + metadata, + ) - client.search_all_iam_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.search_all_iam_policies( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() 
post_with_metadata.assert_called_once() -def test_analyze_iam_policy_rest_bad_request(request_type=asset_service.AnalyzeIamPolicyRequest): +def test_analyze_iam_policy_rest_bad_request( + request_type=asset_service.AnalyzeIamPolicyRequest, +): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'analysis_query': {'scope': 'sample1/sample2'}} + request_init = {"analysis_query": {"scope": "sample1/sample2"}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -15452,25 +17172,27 @@ def test_analyze_iam_policy_rest_bad_request(request_type=asset_service.AnalyzeI client.analyze_iam_policy(request) -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeIamPolicyRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.AnalyzeIamPolicyRequest, + dict, + ], +) def test_analyze_iam_policy_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'analysis_query': {'scope': 'sample1/sample2'}} + request_init = {"analysis_query": {"scope": "sample1/sample2"}} request = request_type(**request_init) # Mock the http request call within the method 
and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.AnalyzeIamPolicyResponse( - fully_explored=True, + fully_explored=True, ) # Wrap the value into a proper Response obj @@ -15480,7 +17202,7 @@ def test_analyze_iam_policy_rest_call_success(request_type): # Convert return value to protobuf type return_value = asset_service.AnalyzeIamPolicyResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_iam_policy(request) @@ -15494,19 +17216,32 @@ def test_analyze_iam_policy_rest_call_success(request_type): def test_analyze_iam_policy_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as 
transcode, + mock.patch.object( + transports.AssetServiceRestInterceptor, "post_analyze_iam_policy" + ) as post, + mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_analyze_iam_policy_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.AnalyzeIamPolicyRequest.pb(asset_service.AnalyzeIamPolicyRequest()) + pb_message = asset_service.AnalyzeIamPolicyRequest.pb( + asset_service.AnalyzeIamPolicyRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -15517,39 +17252,54 @@ def test_analyze_iam_policy_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.AnalyzeIamPolicyResponse.to_json(asset_service.AnalyzeIamPolicyResponse()) + return_value = asset_service.AnalyzeIamPolicyResponse.to_json( + asset_service.AnalyzeIamPolicyResponse() + ) req.return_value.content = return_value request = asset_service.AnalyzeIamPolicyRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = asset_service.AnalyzeIamPolicyResponse() - post_with_metadata.return_value = asset_service.AnalyzeIamPolicyResponse(), metadata + post_with_metadata.return_value = ( + asset_service.AnalyzeIamPolicyResponse(), + metadata, + ) - client.analyze_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.analyze_iam_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def 
test_analyze_iam_policy_longrunning_rest_bad_request(request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): +def test_analyze_iam_policy_longrunning_rest_bad_request( + request_type=asset_service.AnalyzeIamPolicyLongrunningRequest, +): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'analysis_query': {'scope': 'sample1/sample2'}} + request_init = {"analysis_query": {"scope": "sample1/sample2"}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -15558,30 +17308,32 @@ def test_analyze_iam_policy_longrunning_rest_bad_request(request_type=asset_serv client.analyze_iam_policy_longrunning(request) -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeIamPolicyLongrunningRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.AnalyzeIamPolicyLongrunningRequest, + dict, + ], +) def test_analyze_iam_policy_longrunning_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'analysis_query': {'scope': 'sample1/sample2'}} + request_init = {"analysis_query": {"scope": "sample1/sample2"}} request = request_type(**request_init) 
# Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_iam_policy_longrunning(request) @@ -15594,20 +17346,34 @@ def test_analyze_iam_policy_longrunning_rest_call_success(request_type): def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy_longrunning") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy_longrunning_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy_longrunning") as pre: + with ( + 
mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_analyze_iam_policy_longrunning", + ) as post, + mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_analyze_iam_policy_longrunning_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy_longrunning" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.AnalyzeIamPolicyLongrunningRequest.pb(asset_service.AnalyzeIamPolicyLongrunningRequest()) + pb_message = asset_service.AnalyzeIamPolicyLongrunningRequest.pb( + asset_service.AnalyzeIamPolicyLongrunningRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -15622,7 +17388,7 @@ def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): req.return_value.content = return_value request = asset_service.AnalyzeIamPolicyLongrunningRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -15630,7 +17396,13 @@ def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.analyze_iam_policy_longrunning(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.analyze_iam_policy_longrunning( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -15639,18 +17411,20 @@ def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): def test_analyze_move_rest_bad_request(request_type=asset_service.AnalyzeMoveRequest): client = AssetServiceClient( - 
credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'resource': 'sample1/sample2'} + request_init = {"resource": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -15659,25 +17433,26 @@ def test_analyze_move_rest_bad_request(request_type=asset_service.AnalyzeMoveReq client.analyze_move(request) -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeMoveRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.AnalyzeMoveRequest, + dict, + ], +) def test_analyze_move_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'resource': 'sample1/sample2'} + request_init = {"resource": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = asset_service.AnalyzeMoveResponse( - ) + return_value = asset_service.AnalyzeMoveResponse() # Wrap the value into a proper Response obj response_value = mock.Mock() @@ -15686,7 +17461,7 @@ def test_analyze_move_rest_call_success(request_type): # Convert return value to protobuf type return_value = asset_service.AnalyzeMoveResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_move(request) @@ -15699,19 +17474,31 @@ def test_analyze_move_rest_call_success(request_type): def test_analyze_move_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_move") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_move_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_move") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.AssetServiceRestInterceptor, "post_analyze_move" + ) as post, + mock.patch.object( + transports.AssetServiceRestInterceptor, "post_analyze_move_with_metadata" + ) as post_with_metadata, + mock.patch.object( + 
transports.AssetServiceRestInterceptor, "pre_analyze_move" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.AnalyzeMoveRequest.pb(asset_service.AnalyzeMoveRequest()) + pb_message = asset_service.AnalyzeMoveRequest.pb( + asset_service.AnalyzeMoveRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -15722,11 +17509,13 @@ def test_analyze_move_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.AnalyzeMoveResponse.to_json(asset_service.AnalyzeMoveResponse()) + return_value = asset_service.AnalyzeMoveResponse.to_json( + asset_service.AnalyzeMoveResponse() + ) req.return_value.content = return_value request = asset_service.AnalyzeMoveRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -15734,7 +17523,13 @@ def test_analyze_move_rest_interceptors(null_interceptor): post.return_value = asset_service.AnalyzeMoveResponse() post_with_metadata.return_value = asset_service.AnalyzeMoveResponse(), metadata - client.analyze_move(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.analyze_move( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -15743,18 +17538,20 @@ def test_analyze_move_rest_interceptors(null_interceptor): def test_query_assets_rest_bad_request(request_type=asset_service.QueryAssetsRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {"parent": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call 
within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -15763,26 +17560,28 @@ def test_query_assets_rest_bad_request(request_type=asset_service.QueryAssetsReq client.query_assets(request) -@pytest.mark.parametrize("request_type", [ - asset_service.QueryAssetsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.QueryAssetsRequest, + dict, + ], +) def test_query_assets_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {"parent": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = asset_service.QueryAssetsResponse( - job_reference='job_reference_value', - done=True, + job_reference="job_reference_value", + done=True, ) # Wrap the value into a proper Response obj @@ -15792,14 +17591,14 @@ def test_query_assets_rest_call_success(request_type): # Convert return value to protobuf type return_value = asset_service.QueryAssetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.query_assets(request) # Establish that the response is the type that we expect. assert isinstance(response, asset_service.QueryAssetsResponse) - assert response.job_reference == 'job_reference_value' + assert response.job_reference == "job_reference_value" assert response.done is True @@ -15807,19 +17606,31 @@ def test_query_assets_rest_call_success(request_type): def test_query_assets_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_query_assets") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_query_assets_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_query_assets") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + 
mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.AssetServiceRestInterceptor, "post_query_assets" + ) as post, + mock.patch.object( + transports.AssetServiceRestInterceptor, "post_query_assets_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_query_assets" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.QueryAssetsRequest.pb(asset_service.QueryAssetsRequest()) + pb_message = asset_service.QueryAssetsRequest.pb( + asset_service.QueryAssetsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -15830,11 +17641,13 @@ def test_query_assets_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.QueryAssetsResponse.to_json(asset_service.QueryAssetsResponse()) + return_value = asset_service.QueryAssetsResponse.to_json( + asset_service.QueryAssetsResponse() + ) req.return_value.content = return_value request = asset_service.QueryAssetsRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -15842,27 +17655,37 @@ def test_query_assets_rest_interceptors(null_interceptor): post.return_value = asset_service.QueryAssetsResponse() post_with_metadata.return_value = asset_service.QueryAssetsResponse(), metadata - client.query_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.query_assets( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_create_saved_query_rest_bad_request(request_type=asset_service.CreateSavedQueryRequest): +def test_create_saved_query_rest_bad_request( + request_type=asset_service.CreateSavedQueryRequest, +): client = 
AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {"parent": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -15871,19 +17694,49 @@ def test_create_saved_query_rest_bad_request(request_type=asset_service.CreateSa client.create_saved_query(request) -@pytest.mark.parametrize("request_type", [ - asset_service.CreateSavedQueryRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.CreateSavedQueryRequest, + dict, + ], +) def test_create_saved_query_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request_init["saved_query"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'creator': 'creator_value', 'last_update_time': {}, 'last_updater': 'last_updater_value', 'labels': {}, 'content': {'iam_policy_analysis_query': {'scope': 'scope_value', 'resource_selector': {'full_resource_name': 'full_resource_name_value'}, 'identity_selector': {'identity': 'identity_value'}, 'access_selector': 
{'roles': ['roles_value1', 'roles_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}, 'options': {'expand_groups': True, 'expand_roles': True, 'expand_resources': True, 'output_resource_edges': True, 'output_group_edges': True, 'analyze_service_account_impersonation': True}, 'condition_context': {'access_time': {}}}}} + request_init = {"parent": "sample1/sample2"} + request_init["saved_query"] = { + "name": "name_value", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "creator": "creator_value", + "last_update_time": {}, + "last_updater": "last_updater_value", + "labels": {}, + "content": { + "iam_policy_analysis_query": { + "scope": "scope_value", + "resource_selector": {"full_resource_name": "full_resource_name_value"}, + "identity_selector": {"identity": "identity_value"}, + "access_selector": { + "roles": ["roles_value1", "roles_value2"], + "permissions": ["permissions_value1", "permissions_value2"], + }, + "options": { + "expand_groups": True, + "expand_roles": True, + "expand_resources": True, + "output_resource_edges": True, + "output_group_edges": True, + "analyze_service_account_impersonation": True, + }, + "condition_context": {"access_time": {}}, + } + }, + } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 @@ -15903,7 +17756,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -15917,7 +17770,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["saved_query"].items(): # pragma: NO COVER + for field, value in request_init["saved_query"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -15932,12 +17785,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -15950,13 +17807,13 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", ) # Wrap the value into a proper Response obj @@ -15966,36 +17823,49 @@ def get_message_fields(field): # Convert return value to protobuf type return_value = asset_service.SavedQuery.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_saved_query(request) # Establish that the response is the type that we expect. 
assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.creator == "creator_value" + assert response.last_updater == "last_updater_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_create_saved_query_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_saved_query") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_saved_query_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_create_saved_query") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.AssetServiceRestInterceptor, "post_create_saved_query" + ) as post, + mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_create_saved_query_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_create_saved_query" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = 
asset_service.CreateSavedQueryRequest.pb(asset_service.CreateSavedQueryRequest()) + pb_message = asset_service.CreateSavedQueryRequest.pb( + asset_service.CreateSavedQueryRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16010,7 +17880,7 @@ def test_create_saved_query_rest_interceptors(null_interceptor): req.return_value.content = return_value request = asset_service.CreateSavedQueryRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -16018,27 +17888,37 @@ def test_create_saved_query_rest_interceptors(null_interceptor): post.return_value = asset_service.SavedQuery() post_with_metadata.return_value = asset_service.SavedQuery(), metadata - client.create_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.create_saved_query( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_get_saved_query_rest_bad_request(request_type=asset_service.GetSavedQueryRequest): +def test_get_saved_query_rest_bad_request( + request_type=asset_service.GetSavedQueryRequest, +): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/savedQueries/sample3'} + request_init = {"name": "sample1/sample2/savedQueries/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -16047,28 +17927,30 @@ def test_get_saved_query_rest_bad_request(request_type=asset_service.GetSavedQue client.get_saved_query(request) -@pytest.mark.parametrize("request_type", [ - asset_service.GetSavedQueryRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.GetSavedQueryRequest, + dict, + ], +) def test_get_saved_query_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/savedQueries/sample3'} + request_init = {"name": "sample1/sample2/savedQueries/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", ) # Wrap the value into a proper Response obj @@ -16078,36 +17960,48 @@ def test_get_saved_query_rest_call_success(request_type): # Convert return value to protobuf type return_value = asset_service.SavedQuery.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_saved_query(request) # Establish that the response is the type that we expect. assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.creator == "creator_value" + assert response.last_updater == "last_updater_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_saved_query_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, 
"post_get_saved_query") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_saved_query_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_get_saved_query") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.AssetServiceRestInterceptor, "post_get_saved_query" + ) as post, + mock.patch.object( + transports.AssetServiceRestInterceptor, "post_get_saved_query_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_get_saved_query" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.GetSavedQueryRequest.pb(asset_service.GetSavedQueryRequest()) + pb_message = asset_service.GetSavedQueryRequest.pb( + asset_service.GetSavedQueryRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16122,7 +18016,7 @@ def test_get_saved_query_rest_interceptors(null_interceptor): req.return_value.content = return_value request = asset_service.GetSavedQueryRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -16130,27 +18024,37 @@ def test_get_saved_query_rest_interceptors(null_interceptor): post.return_value = asset_service.SavedQuery() post_with_metadata.return_value = asset_service.SavedQuery(), metadata - client.get_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_saved_query( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_list_saved_queries_rest_bad_request(request_type=asset_service.ListSavedQueriesRequest): +def test_list_saved_queries_rest_bad_request( + request_type=asset_service.ListSavedQueriesRequest, +): 
client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {"parent": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -16159,25 +18063,27 @@ def test_list_saved_queries_rest_bad_request(request_type=asset_service.ListSave client.list_saved_queries(request) -@pytest.mark.parametrize("request_type", [ - asset_service.ListSavedQueriesRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.ListSavedQueriesRequest, + dict, + ], +) def test_list_saved_queries_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {"parent": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = asset_service.ListSavedQueriesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -16187,33 +18093,46 @@ def test_list_saved_queries_rest_call_success(request_type): # Convert return value to protobuf type return_value = asset_service.ListSavedQueriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_saved_queries(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSavedQueriesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_saved_queries_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_saved_queries") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_saved_queries_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_saved_queries") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, 
"transcode") as transcode, + mock.patch.object( + transports.AssetServiceRestInterceptor, "post_list_saved_queries" + ) as post, + mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_list_saved_queries_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_list_saved_queries" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.ListSavedQueriesRequest.pb(asset_service.ListSavedQueriesRequest()) + pb_message = asset_service.ListSavedQueriesRequest.pb( + asset_service.ListSavedQueriesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16224,39 +18143,54 @@ def test_list_saved_queries_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.ListSavedQueriesResponse.to_json(asset_service.ListSavedQueriesResponse()) + return_value = asset_service.ListSavedQueriesResponse.to_json( + asset_service.ListSavedQueriesResponse() + ) req.return_value.content = return_value request = asset_service.ListSavedQueriesRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = asset_service.ListSavedQueriesResponse() - post_with_metadata.return_value = asset_service.ListSavedQueriesResponse(), metadata + post_with_metadata.return_value = ( + asset_service.ListSavedQueriesResponse(), + metadata, + ) - client.list_saved_queries(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_saved_queries( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_update_saved_query_rest_bad_request(request_type=asset_service.UpdateSavedQueryRequest): 
+def test_update_saved_query_rest_bad_request( + request_type=asset_service.UpdateSavedQueryRequest, +): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'saved_query': {'name': 'sample1/sample2/savedQueries/sample3'}} + request_init = {"saved_query": {"name": "sample1/sample2/savedQueries/sample3"}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -16265,19 +18199,49 @@ def test_update_saved_query_rest_bad_request(request_type=asset_service.UpdateSa client.update_saved_query(request) -@pytest.mark.parametrize("request_type", [ - asset_service.UpdateSavedQueryRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.UpdateSavedQueryRequest, + dict, + ], +) def test_update_saved_query_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'saved_query': {'name': 'sample1/sample2/savedQueries/sample3'}} - request_init["saved_query"] = {'name': 'sample1/sample2/savedQueries/sample3', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'creator': 'creator_value', 'last_update_time': {}, 'last_updater': 
'last_updater_value', 'labels': {}, 'content': {'iam_policy_analysis_query': {'scope': 'scope_value', 'resource_selector': {'full_resource_name': 'full_resource_name_value'}, 'identity_selector': {'identity': 'identity_value'}, 'access_selector': {'roles': ['roles_value1', 'roles_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}, 'options': {'expand_groups': True, 'expand_roles': True, 'expand_resources': True, 'output_resource_edges': True, 'output_group_edges': True, 'analyze_service_account_impersonation': True}, 'condition_context': {'access_time': {}}}}} + request_init = {"saved_query": {"name": "sample1/sample2/savedQueries/sample3"}} + request_init["saved_query"] = { + "name": "sample1/sample2/savedQueries/sample3", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "creator": "creator_value", + "last_update_time": {}, + "last_updater": "last_updater_value", + "labels": {}, + "content": { + "iam_policy_analysis_query": { + "scope": "scope_value", + "resource_selector": {"full_resource_name": "full_resource_name_value"}, + "identity_selector": {"identity": "identity_value"}, + "access_selector": { + "roles": ["roles_value1", "roles_value2"], + "permissions": ["permissions_value1", "permissions_value2"], + }, + "options": { + "expand_groups": True, + "expand_roles": True, + "expand_resources": True, + "output_resource_edges": True, + "output_group_edges": True, + "analyze_service_account_impersonation": True, + }, + "condition_context": {"access_time": {}}, + } + }, + } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 @@ -16297,7 +18261,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -16311,7 +18275,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["saved_query"].items(): # pragma: NO COVER + for field, value in request_init["saved_query"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -16326,12 +18290,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -16344,13 +18312,13 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", ) # Wrap the value into a proper Response obj @@ -16360,36 +18328,49 @@ def get_message_fields(field): # Convert return value to protobuf type return_value = asset_service.SavedQuery.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_saved_query(request) # Establish that the response is the type that we expect. 
assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.creator == "creator_value" + assert response.last_updater == "last_updater_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_update_saved_query_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_saved_query") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_saved_query_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_update_saved_query") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.AssetServiceRestInterceptor, "post_update_saved_query" + ) as post, + mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_update_saved_query_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_update_saved_query" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = 
asset_service.UpdateSavedQueryRequest.pb(asset_service.UpdateSavedQueryRequest()) + pb_message = asset_service.UpdateSavedQueryRequest.pb( + asset_service.UpdateSavedQueryRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16404,7 +18385,7 @@ def test_update_saved_query_rest_interceptors(null_interceptor): req.return_value.content = return_value request = asset_service.UpdateSavedQueryRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -16412,27 +18393,37 @@ def test_update_saved_query_rest_interceptors(null_interceptor): post.return_value = asset_service.SavedQuery() post_with_metadata.return_value = asset_service.SavedQuery(), metadata - client.update_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.update_saved_query( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_delete_saved_query_rest_bad_request(request_type=asset_service.DeleteSavedQueryRequest): +def test_delete_saved_query_rest_bad_request( + request_type=asset_service.DeleteSavedQueryRequest, +): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/savedQueries/sample3'} + request_init = {"name": "sample1/sample2/savedQueries/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -16441,30 +18432,32 @@ def test_delete_saved_query_rest_bad_request(request_type=asset_service.DeleteSa client.delete_saved_query(request) -@pytest.mark.parametrize("request_type", [ - asset_service.DeleteSavedQueryRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.DeleteSavedQueryRequest, + dict, + ], +) def test_delete_saved_query_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/savedQueries/sample3'} + request_init = {"name": "sample1/sample2/savedQueries/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_saved_query(request) @@ -16477,15 +18470,23 @@ def test_delete_saved_query_rest_call_success(request_type): def test_delete_saved_query_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_delete_saved_query") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_delete_saved_query" + ) as pre, + ): pre.assert_not_called() - pb_message = asset_service.DeleteSavedQueryRequest.pb(asset_service.DeleteSavedQueryRequest()) + pb_message = asset_service.DeleteSavedQueryRequest.pb( + asset_service.DeleteSavedQueryRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16498,31 +18499,41 @@ def test_delete_saved_query_rest_interceptors(null_interceptor): req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} request = asset_service.DeleteSavedQueryRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] 
pre.return_value = request, metadata - client.delete_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.delete_saved_query( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() -def test_batch_get_effective_iam_policies_rest_bad_request(request_type=asset_service.BatchGetEffectiveIamPoliciesRequest): +def test_batch_get_effective_iam_policies_rest_bad_request( + request_type=asset_service.BatchGetEffectiveIamPoliciesRequest, +): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} + request_init = {"scope": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -16531,34 +18542,37 @@ def test_batch_get_effective_iam_policies_rest_bad_request(request_type=asset_se client.batch_get_effective_iam_policies(request) -@pytest.mark.parametrize("request_type", [ - asset_service.BatchGetEffectiveIamPoliciesRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.BatchGetEffectiveIamPoliciesRequest, + dict, + ], +) def test_batch_get_effective_iam_policies_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), 
transport="rest" ) # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} + request_init = {"scope": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = asset_service.BatchGetEffectiveIamPoliciesResponse( - ) + return_value = asset_service.BatchGetEffectiveIamPoliciesResponse() # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 # Convert return value to protobuf type - return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.pb(return_value) + return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.batch_get_effective_iam_policies(request) @@ -16571,19 +18585,34 @@ def test_batch_get_effective_iam_policies_rest_call_success(request_type): def test_batch_get_effective_iam_policies_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - 
mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_effective_iam_policies") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_effective_iam_policies_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_batch_get_effective_iam_policies") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_batch_get_effective_iam_policies", + ) as post, + mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_batch_get_effective_iam_policies_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AssetServiceRestInterceptor, + "pre_batch_get_effective_iam_policies", + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.BatchGetEffectiveIamPoliciesRequest.pb(asset_service.BatchGetEffectiveIamPoliciesRequest()) + pb_message = asset_service.BatchGetEffectiveIamPoliciesRequest.pb( + asset_service.BatchGetEffectiveIamPoliciesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16594,39 +18623,54 @@ def test_batch_get_effective_iam_policies_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.to_json(asset_service.BatchGetEffectiveIamPoliciesResponse()) + return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.to_json( + asset_service.BatchGetEffectiveIamPoliciesResponse() + ) req.return_value.content = return_value request = asset_service.BatchGetEffectiveIamPoliciesRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, 
metadata post.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse() - post_with_metadata.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse(), metadata + post_with_metadata.return_value = ( + asset_service.BatchGetEffectiveIamPoliciesResponse(), + metadata, + ) - client.batch_get_effective_iam_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.batch_get_effective_iam_policies( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_analyze_org_policies_rest_bad_request(request_type=asset_service.AnalyzeOrgPoliciesRequest): +def test_analyze_org_policies_rest_bad_request( + request_type=asset_service.AnalyzeOrgPoliciesRequest, +): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} + request_init = {"scope": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -16635,25 +18679,27 @@ def test_analyze_org_policies_rest_bad_request(request_type=asset_service.Analyz client.analyze_org_policies(request) -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeOrgPoliciesRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.AnalyzeOrgPoliciesRequest, + dict, + ], +) def test_analyze_org_policies_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} + request_init = {"scope": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = asset_service.AnalyzeOrgPoliciesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -16663,33 +18709,46 @@ def test_analyze_org_policies_rest_call_success(request_type): # Convert return value to protobuf type return_value = asset_service.AnalyzeOrgPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_org_policies(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.AnalyzeOrgPoliciesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_analyze_org_policies_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policies") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policies_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_org_policies") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + 
mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.AssetServiceRestInterceptor, "post_analyze_org_policies" + ) as post, + mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_analyze_org_policies_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AssetServiceRestInterceptor, "pre_analyze_org_policies" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.AnalyzeOrgPoliciesRequest.pb(asset_service.AnalyzeOrgPoliciesRequest()) + pb_message = asset_service.AnalyzeOrgPoliciesRequest.pb( + asset_service.AnalyzeOrgPoliciesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16700,39 +18759,54 @@ def test_analyze_org_policies_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.AnalyzeOrgPoliciesResponse.to_json(asset_service.AnalyzeOrgPoliciesResponse()) + return_value = asset_service.AnalyzeOrgPoliciesResponse.to_json( + asset_service.AnalyzeOrgPoliciesResponse() + ) req.return_value.content = return_value request = asset_service.AnalyzeOrgPoliciesRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = asset_service.AnalyzeOrgPoliciesResponse() - post_with_metadata.return_value = asset_service.AnalyzeOrgPoliciesResponse(), metadata + post_with_metadata.return_value = ( + asset_service.AnalyzeOrgPoliciesResponse(), + metadata, + ) - client.analyze_org_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.analyze_org_policies( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def 
test_analyze_org_policy_governed_containers_rest_bad_request(request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest): +def test_analyze_org_policy_governed_containers_rest_bad_request( + request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest, +): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} + request_init = {"scope": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -16741,25 +18815,27 @@ def test_analyze_org_policy_governed_containers_rest_bad_request(request_type=as client.analyze_org_policy_governed_containers(request) -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeOrgPolicyGovernedContainersRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.AnalyzeOrgPolicyGovernedContainersRequest, + dict, + ], +) def test_analyze_org_policy_governed_containers_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} + request_init = {"scope": "sample1/sample2"} request = request_type(**request_init) # Mock the http 
request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -16767,35 +18843,52 @@ def test_analyze_org_policy_governed_containers_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(return_value) + return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_org_policy_governed_containers(request) # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedContainersPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_analyze_org_policy_governed_containers_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_containers") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_containers_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_org_policy_governed_containers") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_analyze_org_policy_governed_containers", + ) as post, + mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_analyze_org_policy_governed_containers_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AssetServiceRestInterceptor, + "pre_analyze_org_policy_governed_containers", + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.AnalyzeOrgPolicyGovernedContainersRequest.pb(asset_service.AnalyzeOrgPolicyGovernedContainersRequest()) + pb_message = 
asset_service.AnalyzeOrgPolicyGovernedContainersRequest.pb( + asset_service.AnalyzeOrgPolicyGovernedContainersRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16806,39 +18899,54 @@ def test_analyze_org_policy_governed_containers_rest_interceptors(null_intercept req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.to_json(asset_service.AnalyzeOrgPolicyGovernedContainersResponse()) + return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.to_json( + asset_service.AnalyzeOrgPolicyGovernedContainersResponse() + ) req.return_value.content = return_value request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() - post_with_metadata.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse(), metadata + post_with_metadata.return_value = ( + asset_service.AnalyzeOrgPolicyGovernedContainersResponse(), + metadata, + ) - client.analyze_org_policy_governed_containers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.analyze_org_policy_governed_containers( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_analyze_org_policy_governed_assets_rest_bad_request(request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): +def test_analyze_org_policy_governed_assets_rest_bad_request( + request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, +): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), 
transport="rest" ) # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} + request_init = {"scope": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -16847,25 +18955,27 @@ def test_analyze_org_policy_governed_assets_rest_bad_request(request_type=asset_ client.analyze_org_policy_governed_assets(request) -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, + dict, + ], +) def test_analyze_org_policy_governed_assets_rest_call_success(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} + request_init = {"scope": "sample1/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -16873,35 +18983,52 @@ def test_analyze_org_policy_governed_assets_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(return_value) + return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_org_policy_governed_assets(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedAssetsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_analyze_org_policy_governed_assets_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AssetServiceRestInterceptor(), + ) client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_assets") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, 
"post_analyze_org_policy_governed_assets_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_org_policy_governed_assets") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_analyze_org_policy_governed_assets", + ) as post, + mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_analyze_org_policy_governed_assets_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AssetServiceRestInterceptor, + "pre_analyze_org_policy_governed_assets", + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.pb(asset_service.AnalyzeOrgPolicyGovernedAssetsRequest()) + pb_message = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.pb( + asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -16912,38 +19039,56 @@ def test_analyze_org_policy_governed_assets_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.to_json(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse()) + return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.to_json( + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() + ) req.return_value.content = return_value request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() - post_with_metadata.return_value = 
asset_service.AnalyzeOrgPolicyGovernedAssetsResponse(), metadata + post_with_metadata.return_value = ( + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse(), + metadata, + ) - client.analyze_org_policy_governed_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.analyze_org_policy_governed_assets( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'sample1/sample2/operations/sample3/sample4'}, request) + request = json_format.ParseDict( + {"name": "sample1/sample2/operations/sample3/sample4"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -16952,20 +19097,23 @@ def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperation client.get_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) def test_get_operation_rest(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'sample1/sample2/operations/sample3/sample4'} + request_init = {"name": "sample1/sample2/operations/sample3/sample4"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = operations_pb2.Operation() @@ -16973,7 +19121,7 @@ def test_get_operation_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -16983,10 +19131,10 @@ def test_get_operation_rest(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, operations_pb2.Operation) + def test_initialize_client_w_rest(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) assert client is not None @@ -17000,9 +19148,7 @@ def test_export_assets_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.export_assets), "__call__") as call: client.export_assets(request=None) # Establish that the underlying stub method was called. @@ -17022,9 +19168,7 @@ def test_list_assets_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_assets), "__call__") as call: client.list_assets(request=None) # Establish that the underlying stub method was called. @@ -17045,8 +19189,8 @@ def test_batch_get_assets_history_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.batch_get_assets_history), - '__call__') as call: + type(client.transport.batch_get_assets_history), "__call__" + ) as call: client.batch_get_assets_history(request=None) # Establish that the underlying stub method was called. @@ -17066,9 +19210,7 @@ def test_create_feed_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.create_feed), "__call__") as call: client.create_feed(request=None) # Establish that the underlying stub method was called. @@ -17088,9 +19230,7 @@ def test_get_feed_empty_call_rest(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.get_feed), "__call__") as call: client.get_feed(request=None) # Establish that the underlying stub method was called. @@ -17110,9 +19250,7 @@ def test_list_feeds_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: + with mock.patch.object(type(client.transport.list_feeds), "__call__") as call: client.list_feeds(request=None) # Establish that the underlying stub method was called. @@ -17132,9 +19270,7 @@ def test_update_feed_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.update_feed), "__call__") as call: client.update_feed(request=None) # Establish that the underlying stub method was called. @@ -17154,9 +19290,7 @@ def test_delete_feed_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_feed), "__call__") as call: client.delete_feed(request=None) # Establish that the underlying stub method was called. @@ -17177,8 +19311,8 @@ def test_search_all_resources_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: + type(client.transport.search_all_resources), "__call__" + ) as call: client.search_all_resources(request=None) # Establish that the underlying stub method was called. @@ -17199,8 +19333,8 @@ def test_search_all_iam_policies_empty_call_rest(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: + type(client.transport.search_all_iam_policies), "__call__" + ) as call: client.search_all_iam_policies(request=None) # Establish that the underlying stub method was called. @@ -17221,8 +19355,8 @@ def test_analyze_iam_policy_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.analyze_iam_policy), - '__call__') as call: + type(client.transport.analyze_iam_policy), "__call__" + ) as call: client.analyze_iam_policy(request=None) # Establish that the underlying stub method was called. @@ -17243,8 +19377,8 @@ def test_analyze_iam_policy_longrunning_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.analyze_iam_policy_longrunning), - '__call__') as call: + type(client.transport.analyze_iam_policy_longrunning), "__call__" + ) as call: client.analyze_iam_policy_longrunning(request=None) # Establish that the underlying stub method was called. @@ -17264,9 +19398,7 @@ def test_analyze_move_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.analyze_move), - '__call__') as call: + with mock.patch.object(type(client.transport.analyze_move), "__call__") as call: client.analyze_move(request=None) # Establish that the underlying stub method was called. @@ -17286,9 +19418,7 @@ def test_query_assets_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.query_assets), - '__call__') as call: + with mock.patch.object(type(client.transport.query_assets), "__call__") as call: client.query_assets(request=None) # Establish that the underlying stub method was called. @@ -17309,8 +19439,8 @@ def test_create_saved_query_empty_call_rest(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: + type(client.transport.create_saved_query), "__call__" + ) as call: client.create_saved_query(request=None) # Establish that the underlying stub method was called. @@ -17330,9 +19460,7 @@ def test_get_saved_query_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: + with mock.patch.object(type(client.transport.get_saved_query), "__call__") as call: client.get_saved_query(request=None) # Establish that the underlying stub method was called. @@ -17353,8 +19481,8 @@ def test_list_saved_queries_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: + type(client.transport.list_saved_queries), "__call__" + ) as call: client.list_saved_queries(request=None) # Establish that the underlying stub method was called. @@ -17375,8 +19503,8 @@ def test_update_saved_query_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_saved_query), - '__call__') as call: + type(client.transport.update_saved_query), "__call__" + ) as call: client.update_saved_query(request=None) # Establish that the underlying stub method was called. @@ -17397,8 +19525,8 @@ def test_delete_saved_query_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: + type(client.transport.delete_saved_query), "__call__" + ) as call: client.delete_saved_query(request=None) # Establish that the underlying stub method was called. @@ -17419,8 +19547,8 @@ def test_batch_get_effective_iam_policies_empty_call_rest(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.batch_get_effective_iam_policies), - '__call__') as call: + type(client.transport.batch_get_effective_iam_policies), "__call__" + ) as call: client.batch_get_effective_iam_policies(request=None) # Establish that the underlying stub method was called. @@ -17441,8 +19569,8 @@ def test_analyze_org_policies_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: + type(client.transport.analyze_org_policies), "__call__" + ) as call: client.analyze_org_policies(request=None) # Establish that the underlying stub method was called. @@ -17463,8 +19591,8 @@ def test_analyze_org_policy_governed_containers_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_containers), "__call__" + ) as call: client.analyze_org_policy_governed_containers(request=None) # Establish that the underlying stub method was called. @@ -17485,8 +19613,8 @@ def test_analyze_org_policy_governed_assets_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: + type(client.transport.analyze_org_policy_governed_assets), "__call__" + ) as call: client.analyze_org_policy_governed_assets(request=None) # Establish that the underlying stub method was called. @@ -17507,12 +19635,13 @@ def test_asset_service_rest_lro_client(): # Ensure that we have an api-core operations client. assert isinstance( transport.operations_client, -operations_v1.AbstractOperationsClient, + operations_v1.AbstractOperationsClient, ) # Ensure that subsequent calls to the property send the exact same object. 
assert transport.operations_client is transport.operations_client + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = AssetServiceClient( @@ -17523,18 +19652,21 @@ def test_transport_grpc_default(): transports.AssetServiceGrpcTransport, ) + def test_asset_service_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.AssetServiceTransport( credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" + credentials_file="credentials.json", ) def test_asset_service_base_transport(): # Instantiate the base transport. - with mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport.__init__') as Transport: + with mock.patch( + "google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport.__init__" + ) as Transport: Transport.return_value = None transport = transports.AssetServiceTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -17543,30 +19675,30 @@ def test_asset_service_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. 
methods = ( - 'export_assets', - 'list_assets', - 'batch_get_assets_history', - 'create_feed', - 'get_feed', - 'list_feeds', - 'update_feed', - 'delete_feed', - 'search_all_resources', - 'search_all_iam_policies', - 'analyze_iam_policy', - 'analyze_iam_policy_longrunning', - 'analyze_move', - 'query_assets', - 'create_saved_query', - 'get_saved_query', - 'list_saved_queries', - 'update_saved_query', - 'delete_saved_query', - 'batch_get_effective_iam_policies', - 'analyze_org_policies', - 'analyze_org_policy_governed_containers', - 'analyze_org_policy_governed_assets', - 'get_operation', + "export_assets", + "list_assets", + "batch_get_assets_history", + "create_feed", + "get_feed", + "list_feeds", + "update_feed", + "delete_feed", + "search_all_resources", + "search_all_iam_policies", + "analyze_iam_policy", + "analyze_iam_policy_longrunning", + "analyze_move", + "query_assets", + "create_saved_query", + "get_saved_query", + "list_saved_queries", + "update_saved_query", + "delete_saved_query", + "batch_get_effective_iam_policies", + "analyze_org_policies", + "analyze_org_policy_governed_containers", + "analyze_org_policy_governed_assets", + "get_operation", ) for method in methods: with pytest.raises(NotImplementedError): @@ -17582,7 +19714,7 @@ def test_asset_service_base_transport(): # Catch all for all remaining methods and properties remainder = [ - 'kind', + "kind", ] for r in remainder: with pytest.raises(NotImplementedError): @@ -17591,25 +19723,36 @@ def test_asset_service_base_transport(): def test_asset_service_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages') as Transport: + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch( + 
"google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages" + ) as Transport, + ): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.AssetServiceTransport( credentials_file="credentials.json", quota_project_id="octopus", ) - load_creds.assert_called_once_with("credentials.json", + load_creds.assert_called_once_with( + "credentials.json", scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) def test_asset_service_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages') as Transport: + with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch( + "google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages" + ) as Transport, + ): Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.AssetServiceTransport() @@ -17618,14 +19761,12 @@ def test_asset_service_base_transport_with_adc(): def test_asset_service_auth_adc(): # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) AssetServiceClient() adc.assert_called_once_with( scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id=None, ) @@ -17640,12 +19781,12 @@ def test_asset_service_auth_adc(): def test_asset_service_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) @@ -17659,48 +19800,46 @@ def test_asset_service_transport_auth_adc(transport_class): ], ) def test_asset_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) 
adc.return_value = (gdch_mock, None) transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) + gdch_mock.with_gdch_audience.assert_called_once_with(e) @pytest.mark.parametrize( "transport_class,grpc_helpers", [ (transports.AssetServiceGrpcTransport, grpc_helpers), - (transports.AssetServiceGrpcAsyncIOTransport, grpc_helpers_async) + (transports.AssetServiceGrpcAsyncIOTransport, grpc_helpers_async), ], ) def test_asset_service_transport_create_channel(transport_class, grpc_helpers): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: + with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel, + ): creds = ga_credentials.AnonymousCredentials() adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) create_channel.assert_called_with( "cloudasset.googleapis.com:443", credentials=creds, credentials_file=None, quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), scopes=["1", "2"], default_host="cloudasset.googleapis.com", ssl_credentials=None, @@ -17711,10 +19850,11 @@ def test_asset_service_transport_create_channel(transport_class, grpc_helpers): ) -@pytest.mark.parametrize("transport_class", [transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport]) -def test_asset_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): +@pytest.mark.parametrize( + "transport_class", + [transports.AssetServiceGrpcTransport, 
transports.AssetServiceGrpcAsyncIOTransport], +) +def test_asset_service_grpc_transport_client_cert_source_for_mtls(transport_class): cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. @@ -17723,7 +19863,7 @@ def test_asset_service_grpc_transport_client_cert_source_for_mtls( transport_class( host="squid.clam.whelk", credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds + ssl_channel_credentials=mock_ssl_channel_creds, ) mock_create_channel.assert_called_once_with( "squid.clam.whelk:443", @@ -17744,61 +19884,77 @@ def test_asset_service_grpc_transport_client_cert_source_for_mtls( with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: transport_class( credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback + client_cert_source_for_mtls=client_cert_source_callback, ) expected_cert, expected_key = client_cert_source_callback() mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key + certificate_chain=expected_cert, private_key=expected_key ) + def test_asset_service_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.AssetServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.AssetServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) def 
test_asset_service_host_no_port(transport_name): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='cloudasset.googleapis.com'), - transport=transport_name, + client_options=client_options.ClientOptions( + api_endpoint="cloudasset.googleapis.com" + ), + transport=transport_name, ) assert client.transport._host == ( - 'cloudasset.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://cloudasset.googleapis.com' + "cloudasset.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudasset.googleapis.com" ) -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) def test_asset_service_host_with_port(transport_name): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='cloudasset.googleapis.com:8000'), + client_options=client_options.ClientOptions( + api_endpoint="cloudasset.googleapis.com:8000" + ), transport=transport_name, ) assert client.transport._host == ( - 'cloudasset.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://cloudasset.googleapis.com:8000' + "cloudasset.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudasset.googleapis.com:8000" ) -@pytest.mark.parametrize("transport_name", [ - "rest", -]) + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) def test_asset_service_client_transport_session_collision(transport_name): creds1 = ga_credentials.AnonymousCredentials() creds2 = ga_credentials.AnonymousCredentials() @@ -17879,8 +20035,10 @@ def test_asset_service_client_transport_session_collision(transport_name): session1 = client1.transport.analyze_org_policy_governed_assets._session session2 = 
client2.transport.analyze_org_policy_governed_assets._session assert session1 != session2 + + def test_asset_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.AssetServiceGrpcTransport( @@ -17893,7 +20051,7 @@ def test_asset_service_grpc_transport_channel(): def test_asset_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.AssetServiceGrpcAsyncIOTransport( @@ -17908,12 +20066,17 @@ def test_asset_service_grpc_asyncio_transport_channel(): # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.filterwarnings("ignore::FutureWarning") -@pytest.mark.parametrize("transport_class", [transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport]) -def test_asset_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: +@pytest.mark.parametrize( + "transport_class", + [transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport], +) +def test_asset_service_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -17922,7 +20085,7 @@ def test_asset_service_transport_channel_mtls_with_client_cert_source( cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -17952,17 +20115,20 @@ def test_asset_service_transport_channel_mtls_with_client_cert_source( # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport]) -def test_asset_service_transport_channel_mtls_with_adc( - transport_class -): +@pytest.mark.parametrize( + "transport_class", + [transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport], +) +def test_asset_service_transport_channel_mtls_with_adc(transport_class): mock_ssl_cred = mock.Mock() with mock.patch.multiple( "google.auth.transport.grpc.SslCredentials", __init__=mock.Mock(return_value=None), ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel mock_cred = mock.Mock() @@ -17993,7 +20159,7 @@ def test_asset_service_transport_channel_mtls_with_adc( def test_asset_service_grpc_lro_client(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) transport = client.transport @@ -18010,7 +20176,7 @@ def test_asset_service_grpc_lro_client(): def test_asset_service_grpc_lro_async_client(): client = AssetServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', + transport="grpc_asyncio", ) transport = client.transport @@ -18027,7 +20193,10 @@ def test_asset_service_grpc_lro_async_client(): def test_access_level_path(): access_policy = "squid" access_level = "clam" - expected = "accessPolicies/{access_policy}/accessLevels/{access_level}".format(access_policy=access_policy, access_level=access_level, ) + expected = "accessPolicies/{access_policy}/accessLevels/{access_level}".format( + access_policy=access_policy, + access_level=access_level, + ) actual = AssetServiceClient.access_level_path(access_policy, access_level) assert expected == 
actual @@ -18043,9 +20212,12 @@ def test_parse_access_level_path(): actual = AssetServiceClient.parse_access_level_path(path) assert expected == actual + def test_access_policy_path(): access_policy = "oyster" - expected = "accessPolicies/{access_policy}".format(access_policy=access_policy, ) + expected = "accessPolicies/{access_policy}".format( + access_policy=access_policy, + ) actual = AssetServiceClient.access_policy_path(access_policy) assert expected == actual @@ -18060,6 +20232,7 @@ def test_parse_access_policy_path(): actual = AssetServiceClient.parse_access_policy_path(path) assert expected == actual + def test_asset_path(): expected = "*".format() actual = AssetServiceClient.asset_path() @@ -18067,18 +20240,21 @@ def test_asset_path(): def test_parse_asset_path(): - expected = { - } + expected = {} path = AssetServiceClient.asset_path(**expected) # Check that the path construction is reversible. actual = AssetServiceClient.parse_asset_path(path) assert expected == actual + def test_feed_path(): project = "cuttlefish" feed = "mussel" - expected = "projects/{project}/feeds/{feed}".format(project=project, feed=feed, ) + expected = "projects/{project}/feeds/{feed}".format( + project=project, + feed=feed, + ) actual = AssetServiceClient.feed_path(project, feed) assert expected == actual @@ -18094,11 +20270,18 @@ def test_parse_feed_path(): actual = AssetServiceClient.parse_feed_path(path) assert expected == actual + def test_inventory_path(): project = "scallop" location = "abalone" instance = "squid" - expected = "projects/{project}/locations/{location}/instances/{instance}/inventory".format(project=project, location=location, instance=instance, ) + expected = ( + "projects/{project}/locations/{location}/instances/{instance}/inventory".format( + project=project, + location=location, + instance=instance, + ) + ) actual = AssetServiceClient.inventory_path(project, location, instance) assert expected == actual @@ -18115,10 +20298,14 @@ def 
test_parse_inventory_path(): actual = AssetServiceClient.parse_inventory_path(path) assert expected == actual + def test_saved_query_path(): project = "oyster" saved_query = "nudibranch" - expected = "projects/{project}/savedQueries/{saved_query}".format(project=project, saved_query=saved_query, ) + expected = "projects/{project}/savedQueries/{saved_query}".format( + project=project, + saved_query=saved_query, + ) actual = AssetServiceClient.saved_query_path(project, saved_query) assert expected == actual @@ -18134,10 +20321,16 @@ def test_parse_saved_query_path(): actual = AssetServiceClient.parse_saved_query_path(path) assert expected == actual + def test_service_perimeter_path(): access_policy = "winkle" service_perimeter = "nautilus" - expected = "accessPolicies/{access_policy}/servicePerimeters/{service_perimeter}".format(access_policy=access_policy, service_perimeter=service_perimeter, ) + expected = ( + "accessPolicies/{access_policy}/servicePerimeters/{service_perimeter}".format( + access_policy=access_policy, + service_perimeter=service_perimeter, + ) + ) actual = AssetServiceClient.service_perimeter_path(access_policy, service_perimeter) assert expected == actual @@ -18153,9 +20346,12 @@ def test_parse_service_perimeter_path(): actual = AssetServiceClient.parse_service_perimeter_path(path) assert expected == actual + def test_common_billing_account_path(): billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) actual = AssetServiceClient.common_billing_account_path(billing_account) assert expected == actual @@ -18170,9 +20366,12 @@ def test_parse_common_billing_account_path(): actual = AssetServiceClient.parse_common_billing_account_path(path) assert expected == actual + def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) + expected = 
"folders/{folder}".format( + folder=folder, + ) actual = AssetServiceClient.common_folder_path(folder) assert expected == actual @@ -18187,9 +20386,12 @@ def test_parse_common_folder_path(): actual = AssetServiceClient.parse_common_folder_path(path) assert expected == actual + def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = AssetServiceClient.common_organization_path(organization) assert expected == actual @@ -18204,9 +20406,12 @@ def test_parse_common_organization_path(): actual = AssetServiceClient.parse_common_organization_path(path) assert expected == actual + def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) + expected = "projects/{project}".format( + project=project, + ) actual = AssetServiceClient.common_project_path(project) assert expected == actual @@ -18221,10 +20426,14 @@ def test_parse_common_project_path(): actual = AssetServiceClient.parse_common_project_path(path) assert expected == actual + def test_common_location_path(): project = "winkle" location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) actual = AssetServiceClient.common_location_path(project, location) assert expected == actual @@ -18244,14 +20453,18 @@ def test_parse_common_location_path(): def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() - with mock.patch.object(transports.AssetServiceTransport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.AssetServiceTransport, "_prep_wrapped_messages" + ) as prep: client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), 
client_info=client_info, ) prep.assert_called_once_with(client_info) - with mock.patch.object(transports.AssetServiceTransport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.AssetServiceTransport, "_prep_wrapped_messages" + ) as prep: transport_class = AssetServiceClient.get_transport_class() transport = transport_class( credentials=ga_credentials.AnonymousCredentials(), @@ -18262,7 +20475,8 @@ def test_client_with_default_client_info(): def test_get_operation(transport: str = "grpc"): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -18282,10 +20496,12 @@ def test_get_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.Operation) + @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -18330,7 +20546,11 @@ def test_get_operation_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_get_operation_field_headers_async(): @@ -18356,7 +20576,10 @@ async def test_get_operation_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_get_operation_from_dict(): @@ -18375,6 +20598,7 @@ def test_get_operation_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = AssetServiceAsyncClient( @@ -18409,6 +20633,7 @@ def test_get_operation_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.GetOperationRequest() + @pytest.mark.asyncio async def test_get_operation_flattened_async(): client = AssetServiceAsyncClient( @@ -18429,10 +20654,11 @@ async def test_get_operation_flattened_async(): def test_transport_close_grpc(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: with client: close.assert_not_called() close.assert_called_once() @@ -18441,10 +20667,11 @@ def test_transport_close_grpc(): @pytest.mark.asyncio async def test_transport_close_grpc_asyncio(): client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" + credentials=async_anonymous_credentials(), transport="grpc_asyncio" ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: async with client: close.assert_not_called() close.assert_called_once() @@ -18452,10 +20679,11 @@ async def test_transport_close_grpc_asyncio(): def test_transport_close_rest(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + 
credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: with client: close.assert_not_called() close.assert_called_once() @@ -18463,13 +20691,12 @@ def test_transport_close_rest(): def test_client_ctx(): transports = [ - 'rest', - 'grpc', + "rest", + "grpc", ] for transport in transports: client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. with mock.patch.object(type(client.transport), "close") as close: @@ -18478,10 +20705,14 @@ def test_client_ctx(): pass close.assert_called() -@pytest.mark.parametrize("client_class,transport_class", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport), -]) + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (AssetServiceClient, transports.AssetServiceGrpcTransport), + (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport), + ], +) def test_api_key_credentials(client_class, transport_class): with mock.patch.object( google.auth._default, "get_api_key_credentials", create=True @@ -18496,7 +20727,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py 
index b931406b0132..a6be00cae63c 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py @@ -28,7 +28,6 @@ import os import shlex import sys -import logging from typing import Any # If extensions (or modules to document with autodoc) are in another directory, @@ -83,9 +82,9 @@ root_doc = "index" # General information about the project. -project = u"google-iam-credentials" -copyright = u"2025, Google, LLC" -author = u"Google APIs" +project = "google-iam-credentials" +copyright = "2025, Google, LLC" +author = "Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -285,7 +284,7 @@ ( root_doc, "google-iam-credentials.tex", - u"google-iam-credentials Documentation", + "google-iam-credentials Documentation", author, "manual", ) @@ -386,6 +385,7 @@ napoleon_use_param = True napoleon_use_rtype = True + # Setup for sphinx behaviors such as warning filters. class UnexpectedUnindentFilter(logging.Filter): """Filter out warnings about unexpected unindentation following bullet lists.""" @@ -413,5 +413,5 @@ def setup(app: Any) -> None: """ # Sphinx's logger is hierarchical. Adding a filter to the # root 'sphinx' logger will catch warnings from all sub-loggers. 
- logger = logging.getLogger('sphinx') + logger = logging.getLogger("sphinx") logger.addFilter(UnexpectedUnindentFilter()) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py index 9c0382dd3319..1196554e01ee 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py @@ -18,26 +18,32 @@ __version__ = package_version.__version__ -from google.iam.credentials_v1.services.iam_credentials.client import IAMCredentialsClient -from google.iam.credentials_v1.services.iam_credentials.async_client import IAMCredentialsAsyncClient - -from google.iam.credentials_v1.types.common import GenerateAccessTokenRequest -from google.iam.credentials_v1.types.common import GenerateAccessTokenResponse -from google.iam.credentials_v1.types.common import GenerateIdTokenRequest -from google.iam.credentials_v1.types.common import GenerateIdTokenResponse -from google.iam.credentials_v1.types.common import SignBlobRequest -from google.iam.credentials_v1.types.common import SignBlobResponse -from google.iam.credentials_v1.types.common import SignJwtRequest -from google.iam.credentials_v1.types.common import SignJwtResponse +from google.iam.credentials_v1.services.iam_credentials.async_client import ( + IAMCredentialsAsyncClient, +) +from google.iam.credentials_v1.services.iam_credentials.client import ( + IAMCredentialsClient, +) +from google.iam.credentials_v1.types.common import ( + GenerateAccessTokenRequest, + GenerateAccessTokenResponse, + GenerateIdTokenRequest, + GenerateIdTokenResponse, + SignBlobRequest, + SignBlobResponse, + SignJwtRequest, + SignJwtResponse, +) -__all__ = ('IAMCredentialsClient', - 'IAMCredentialsAsyncClient', - 'GenerateAccessTokenRequest', - 
'GenerateAccessTokenResponse', - 'GenerateIdTokenRequest', - 'GenerateIdTokenResponse', - 'SignBlobRequest', - 'SignBlobResponse', - 'SignJwtRequest', - 'SignJwtResponse', +__all__ = ( + "IAMCredentialsClient", + "IAMCredentialsAsyncClient", + "GenerateAccessTokenRequest", + "GenerateAccessTokenResponse", + "GenerateIdTokenRequest", + "GenerateIdTokenResponse", + "SignBlobRequest", + "SignBlobResponse", + "SignJwtRequest", + "SignJwtResponse", ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py index 2890169a652d..501961b434c1 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.iam.credentials_v1 import gapic_version as package_version +import sys import google.api_core as api_core -import sys +from google.iam.credentials_v1 import gapic_version as package_version __version__ = package_version.__version__ @@ -28,45 +28,51 @@ import importlib_metadata as metadata -from .services.iam_credentials import IAMCredentialsClient -from .services.iam_credentials import IAMCredentialsAsyncClient - -from .types.common import GenerateAccessTokenRequest -from .types.common import GenerateAccessTokenResponse -from .types.common import GenerateIdTokenRequest -from .types.common import GenerateIdTokenResponse -from .types.common import SignBlobRequest -from .types.common import SignBlobResponse -from .types.common import SignJwtRequest -from .types.common import SignJwtResponse +from .services.iam_credentials import IAMCredentialsAsyncClient, IAMCredentialsClient +from .types.common import ( + GenerateAccessTokenRequest, + GenerateAccessTokenResponse, + GenerateIdTokenRequest, + GenerateIdTokenResponse, + SignBlobRequest, + SignBlobResponse, + SignJwtRequest, + SignJwtResponse, +) -if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER - api_core.check_python_version("google.iam.credentials_v1") # type: ignore - api_core.check_dependency_versions("google.iam.credentials_v1") # type: ignore -else: # pragma: NO COVER +if hasattr(api_core, "check_python_version") and hasattr( + api_core, "check_dependency_versions" +): # pragma: NO COVER + api_core.check_python_version("google.iam.credentials_v1") # type: ignore + api_core.check_dependency_versions("google.iam.credentials_v1") # type: ignore +else: # pragma: NO COVER # An older version of api_core is installed which does not define the # functions above. We do equivalent checks manually. 
try: - import warnings import sys + import warnings _py_version_str = sys.version.split()[0] _package_label = "google.iam.credentials_v1" if sys.version_info < (3, 9): - warnings.warn("You are using a non-supported Python version " + - f"({_py_version_str}). Google will not post any further " + - f"updates to {_package_label} supporting this Python version. " + - "Please upgrade to the latest Python version, or at " + - f"least to Python 3.9, and then update {_package_label}.", - FutureWarning) + warnings.warn( + "You are using a non-supported Python version " + + f"({_py_version_str}). Google will not post any further " + + f"updates to {_package_label} supporting this Python version. " + + "Please upgrade to the latest Python version, or at " + + f"least to Python 3.9, and then update {_package_label}.", + FutureWarning, + ) if sys.version_info[:2] == (3, 9): - warnings.warn(f"You are using a Python version ({_py_version_str}) " + - f"which Google will stop supporting in {_package_label} in " + - "January 2026. Please " + - "upgrade to the latest Python version, or at " + - "least to Python 3.10, before then, and " + - f"then update {_package_label}.", - FutureWarning) + warnings.warn( + f"You are using a Python version ({_py_version_str}) " + + f"which Google will stop supporting in {_package_label} in " + + "January 2026. Please " + + "upgrade to the latest Python version, or at " + + "least to Python 3.10, before then, and " + + f"then update {_package_label}.", + FutureWarning, + ) def parse_version_to_tuple(version_string: str): """Safely converts a semantic version string to a comparable tuple of integers. 
@@ -104,35 +110,39 @@ def _get_version(dependency_name): _recommendation = " (we recommend 6.x)" (_version_used, _version_used_string) = _get_version(_dependency_package) if _version_used and _version_used < _next_supported_version_tuple: - warnings.warn(f"Package {_package_label} depends on " + - f"{_dependency_package}, currently installed at version " + - f"{_version_used_string}. Future updates to " + - f"{_package_label} will require {_dependency_package} at " + - f"version {_next_supported_version} or higher{_recommendation}." + - " Please ensure " + - "that either (a) your Python environment doesn't pin the " + - f"version of {_dependency_package}, so that updates to " + - f"{_package_label} can require the higher version, or " + - "(b) you manually update your Python environment to use at " + - f"least version {_next_supported_version} of " + - f"{_dependency_package}.", - FutureWarning) + warnings.warn( + f"Package {_package_label} depends on " + + f"{_dependency_package}, currently installed at version " + + f"{_version_used_string}. Future updates to " + + f"{_package_label} will require {_dependency_package} at " + + f"version {_next_supported_version} or higher{_recommendation}." + + " Please ensure " + + "that either (a) your Python environment doesn't pin the " + + f"version of {_dependency_package}, so that updates to " + + f"{_package_label} can require the higher version, or " + + "(b) you manually update your Python environment to use at " + + f"least version {_next_supported_version} of " + + f"{_dependency_package}.", + FutureWarning, + ) except Exception: - warnings.warn("Could not determine the version of Python " + - "currently being used. To continue receiving " + - "updates for {_package_label}, ensure you are " + - "using a supported version of Python; see " + - "https://devguide.python.org/versions/") + warnings.warn( + "Could not determine the version of Python " + + "currently being used. 
To continue receiving " + + "updates for {_package_label}, ensure you are " + + "using a supported version of Python; see " + + "https://devguide.python.org/versions/" + ) __all__ = ( - 'IAMCredentialsAsyncClient', -'GenerateAccessTokenRequest', -'GenerateAccessTokenResponse', -'GenerateIdTokenRequest', -'GenerateIdTokenResponse', -'IAMCredentialsClient', -'SignBlobRequest', -'SignBlobResponse', -'SignJwtRequest', -'SignJwtResponse', + "IAMCredentialsAsyncClient", + "GenerateAccessTokenRequest", + "GenerateAccessTokenResponse", + "GenerateIdTokenRequest", + "GenerateIdTokenResponse", + "IAMCredentialsClient", + "SignBlobRequest", + "SignBlobResponse", + "SignJwtRequest", + "SignJwtResponse", ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py index 988290706545..36b57fe4c555 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .client import IAMCredentialsClient from .async_client import IAMCredentialsAsyncClient +from .client import IAMCredentialsClient __all__ = ( - 'IAMCredentialsClient', - 'IAMCredentialsAsyncClient', + "IAMCredentialsClient", + "IAMCredentialsAsyncClient", ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index 7852bfe5c199..8287f03cdbbd 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -14,41 +14,53 @@ # limitations under the License. # import logging as std_logging -from collections import OrderedDict import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.iam.credentials_v1 import gapic_version as package_version +from collections import OrderedDict +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) -from google.api_core.client_options import ClientOptions +import google.protobuf from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.iam.credentials_v1 import gapic_version as package_version +from google.oauth2 import service_account # type: ignore 
try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore -from google.iam.credentials_v1.types import common import google.protobuf.duration_pb2 as duration_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore -from .transports.base import IAMCredentialsTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import IAMCredentialsGrpcAsyncIOTransport +from google.iam.credentials_v1.types import common + from .client import IAMCredentialsClient +from .transports.base import DEFAULT_CLIENT_INFO, IAMCredentialsTransport +from .transports.grpc_asyncio import IAMCredentialsGrpcAsyncIOTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) + class IAMCredentialsAsyncClient: """A service account is a special type of Google account that belongs to your application or a virtual machine (VM), instead @@ -72,17 +84,33 @@ class IAMCredentialsAsyncClient: _DEFAULT_UNIVERSE = IAMCredentialsClient._DEFAULT_UNIVERSE service_account_path = staticmethod(IAMCredentialsClient.service_account_path) - parse_service_account_path = staticmethod(IAMCredentialsClient.parse_service_account_path) - common_billing_account_path = staticmethod(IAMCredentialsClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(IAMCredentialsClient.parse_common_billing_account_path) + parse_service_account_path = staticmethod( + IAMCredentialsClient.parse_service_account_path + ) + common_billing_account_path = staticmethod( + IAMCredentialsClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + IAMCredentialsClient.parse_common_billing_account_path + ) common_folder_path = 
staticmethod(IAMCredentialsClient.common_folder_path) - parse_common_folder_path = staticmethod(IAMCredentialsClient.parse_common_folder_path) - common_organization_path = staticmethod(IAMCredentialsClient.common_organization_path) - parse_common_organization_path = staticmethod(IAMCredentialsClient.parse_common_organization_path) + parse_common_folder_path = staticmethod( + IAMCredentialsClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + IAMCredentialsClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + IAMCredentialsClient.parse_common_organization_path + ) common_project_path = staticmethod(IAMCredentialsClient.common_project_path) - parse_common_project_path = staticmethod(IAMCredentialsClient.parse_common_project_path) + parse_common_project_path = staticmethod( + IAMCredentialsClient.parse_common_project_path + ) common_location_path = staticmethod(IAMCredentialsClient.common_location_path) - parse_common_location_path = staticmethod(IAMCredentialsClient.parse_common_location_path) + parse_common_location_path = staticmethod( + IAMCredentialsClient.parse_common_location_path + ) @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): @@ -124,7 +152,9 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): """Return the API endpoint and client cert source for mutual TLS. 
The client cert source is determined in the following order: @@ -187,12 +217,16 @@ def universe_domain(self) -> str: get_transport_class = IAMCredentialsClient.get_transport_class - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, IAMCredentialsTransport, Callable[..., IAMCredentialsTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, IAMCredentialsTransport, Callable[..., IAMCredentialsTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the iam credentials async client. Args: @@ -250,34 +284,42 @@ def __init__(self, *, transport=transport, client_options=client_options, client_info=client_info, - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER _LOGGER.debug( "Created client `google.iam.credentials_v1.IAMCredentialsAsyncClient`.", - extra = { + extra={ "serviceName": "google.iam.credentials.v1.IAMCredentials", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if 
hasattr(self._client._transport, "_credentials") + else { "serviceName": "google.iam.credentials.v1.IAMCredentials", "credentialsType": None, - } + }, ) - async def generate_access_token(self, - request: Optional[Union[common.GenerateAccessTokenRequest, dict]] = None, - *, - name: Optional[str] = None, - delegates: Optional[MutableSequence[str]] = None, - scope: Optional[MutableSequence[str]] = None, - lifetime: Optional[duration_pb2.Duration] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> common.GenerateAccessTokenResponse: + async def generate_access_token( + self, + request: Optional[Union[common.GenerateAccessTokenRequest, dict]] = None, + *, + name: Optional[str] = None, + delegates: Optional[MutableSequence[str]] = None, + scope: Optional[MutableSequence[str]] = None, + lifetime: Optional[duration_pb2.Duration] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> common.GenerateAccessTokenResponse: r"""Generates an OAuth 2.0 access token for a service account. @@ -378,10 +420,14 @@ async def sample_generate_access_token(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name, delegates, scope, lifetime] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -401,14 +447,14 @@ async def sample_generate_access_token(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.generate_access_token] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.generate_access_token + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -425,17 +471,18 @@ async def sample_generate_access_token(): # Done; return the response. return response - async def generate_id_token(self, - request: Optional[Union[common.GenerateIdTokenRequest, dict]] = None, - *, - name: Optional[str] = None, - delegates: Optional[MutableSequence[str]] = None, - audience: Optional[str] = None, - include_email: Optional[bool] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> common.GenerateIdTokenResponse: + async def generate_id_token( + self, + request: Optional[Union[common.GenerateIdTokenRequest, dict]] = None, + *, + name: Optional[str] = None, + delegates: Optional[MutableSequence[str]] = None, + audience: Optional[str] = None, + include_email: Optional[bool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> common.GenerateIdTokenResponse: r"""Generates an OpenID Connect ID token for a service account. 
@@ -530,10 +577,14 @@ async def sample_generate_id_token(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name, delegates, audience, include_email] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -553,14 +604,14 @@ async def sample_generate_id_token(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.generate_id_token] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.generate_id_token + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -577,16 +628,17 @@ async def sample_generate_id_token(): # Done; return the response. 
return response - async def sign_blob(self, - request: Optional[Union[common.SignBlobRequest, dict]] = None, - *, - name: Optional[str] = None, - delegates: Optional[MutableSequence[str]] = None, - payload: Optional[bytes] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> common.SignBlobResponse: + async def sign_blob( + self, + request: Optional[Union[common.SignBlobRequest, dict]] = None, + *, + name: Optional[str] = None, + delegates: Optional[MutableSequence[str]] = None, + payload: Optional[bytes] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> common.SignBlobResponse: r"""Signs a blob using a service account's system-managed private key. @@ -670,10 +722,14 @@ async def sample_sign_blob(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name, delegates, payload] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -691,14 +747,14 @@ async def sample_sign_blob(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.sign_blob] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.sign_blob + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -715,16 +771,17 @@ async def sample_sign_blob(): # Done; return the response. return response - async def sign_jwt(self, - request: Optional[Union[common.SignJwtRequest, dict]] = None, - *, - name: Optional[str] = None, - delegates: Optional[MutableSequence[str]] = None, - payload: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> common.SignJwtResponse: + async def sign_jwt( + self, + request: Optional[Union[common.SignJwtRequest, dict]] = None, + *, + name: Optional[str] = None, + delegates: Optional[MutableSequence[str]] = None, + payload: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> common.SignJwtResponse: r"""Signs a JWT using a service account's system-managed private key. @@ -811,10 +868,14 @@ async def sample_sign_jwt(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [name, delegates, payload] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -837,9 +898,7 @@ async def sample_sign_jwt(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. 
@@ -862,12 +921,13 @@ async def __aenter__(self) -> "IAMCredentialsAsyncClient": async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "IAMCredentialsAsyncClient", -) +__all__ = ("IAMCredentialsAsyncClient",) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index c55d4a2a44bb..87ab1e0a7217 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -13,27 +13,38 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from collections import OrderedDict -from http import HTTPStatus import json import logging as std_logging import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast import warnings +from collections import OrderedDict +from http import HTTPStatus +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) -from google.iam.credentials_v1 import gapic_version as package_version - +import google.protobuf from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.iam.credentials_v1 import gapic_version as package_version +from google.oauth2 import service_account # type: ignore try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -42,16 +53,18 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) -from google.iam.credentials_v1.types import common import google.protobuf.duration_pb2 as duration_pb2 # type: ignore import 
google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore -from .transports.base import IAMCredentialsTransport, DEFAULT_CLIENT_INFO +from google.iam.credentials_v1.types import common + +from .transports.base import DEFAULT_CLIENT_INFO, IAMCredentialsTransport from .transports.grpc import IAMCredentialsGrpcTransport from .transports.grpc_asyncio import IAMCredentialsGrpcAsyncIOTransport from .transports.rest import IAMCredentialsRestTransport @@ -64,14 +77,16 @@ class IAMCredentialsClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. """ + _transport_registry = OrderedDict() # type: Dict[str, Type[IAMCredentialsTransport]] _transport_registry["grpc"] = IAMCredentialsGrpcTransport _transport_registry["grpc_asyncio"] = IAMCredentialsGrpcAsyncIOTransport _transport_registry["rest"] = IAMCredentialsRestTransport - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[IAMCredentialsTransport]: + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[IAMCredentialsTransport]: """Returns an appropriate transport class. Args: @@ -157,14 +172,16 @@ def _use_client_cert_effective(): bool: whether client certificate should be used for mTLS Raises: ValueError: (If using a version of google-auth without should_use_client_cert and - GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) 
""" # check if google-auth version supports should_use_client_cert for automatic mTLS enablement if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER return mtls.should_use_client_cert() - else: # pragma: NO COVER + else: # pragma: NO COVER # if unsupported, fallback to reading from env var - use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert_str = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() if use_client_cert_str not in ("true", "false"): raise ValueError( "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" @@ -203,8 +220,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: IAMCredentialsClient: The constructed client. """ - credentials = service_account.Credentials.from_service_account_file( - filename) + credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials return cls(*args, **kwargs) @@ -221,73 +237,106 @@ def transport(self) -> IAMCredentialsTransport: return self._transport @staticmethod - def service_account_path(project: str,service_account: str,) -> str: + def service_account_path( + project: str, + service_account: str, + ) -> str: """Returns a fully-qualified service_account string.""" - return "projects/{project}/serviceAccounts/{service_account}".format(project=project, service_account=service_account, ) + return "projects/{project}/serviceAccounts/{service_account}".format( + project=project, + service_account=service_account, + ) @staticmethod - def parse_service_account_path(path: str) -> Dict[str,str]: + def parse_service_account_path(path: str) -> Dict[str, str]: """Parses a service_account path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/serviceAccounts/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/serviceAccounts/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def 
common_billing_account_path(billing_account: str, ) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: + def parse_common_billing_account_path(path: str) -> Dict[str, str]: """Parse a billing_account path into its component segments.""" m = re.match(r"^billingAccounts/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str, ) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: + def parse_common_folder_path(path: str) -> Dict[str, str]: """Parse a folder path into its component segments.""" m = re.match(r"^folders/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str, ) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: + def parse_common_organization_path(path: str) -> Dict[str, str]: """Parse a organization path into its component segments.""" m = re.match(r"^organizations/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str, ) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project 
string.""" - return "projects/{project}".format(project=project, ) + return "projects/{project}".format( + project=project, + ) @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: + def parse_common_project_path(path: str) -> Dict[str, str]: """Parse a project path into its component segments.""" m = re.match(r"^projects/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str, ) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: + def parse_common_location_path(path: str) -> Dict[str, str]: """Parse a location path into its component segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -319,14 +368,18 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = IAMCredentialsClient._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Figure out the client cert source to use. client_cert_source = None @@ -339,7 +392,9 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio # Figure out which api endpoint to use. if client_options.api_endpoint is not None: api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): api_endpoint = cls.DEFAULT_MTLS_ENDPOINT else: api_endpoint = cls.DEFAULT_ENDPOINT @@ -364,7 +419,9 @@ def _read_environment_variables(): use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) return use_client_cert, use_mtls_endpoint, universe_domain_env @staticmethod @@ -387,7 +444,9 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): return client_cert_source @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint) -> str: + def _get_api_endpoint( + 
api_override, client_cert_source, universe_domain, use_mtls_endpoint + ) -> str: """Return the API endpoint used by the client. Args: @@ -403,17 +462,25 @@ def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtl """ if api_override is not None: api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): _default_universe = IAMCredentialsClient._DEFAULT_UNIVERSE if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) api_endpoint = IAMCredentialsClient.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = IAMCredentialsClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + api_endpoint = IAMCredentialsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) return api_endpoint @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: """Return the universe domain used by the client. Args: @@ -449,15 +516,18 @@ def _validate_universe_domain(self): return True def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError + self, error: core_exceptions.GoogleAPICallError ) -> None: """Adds credential info string to error details for 401/403/404 errors. Args: error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
""" - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: return cred = self._transport._credentials @@ -490,12 +560,16 @@ def universe_domain(self) -> str: """ return self._universe_domain - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, IAMCredentialsTransport, Callable[..., IAMCredentialsTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, IAMCredentialsTransport, Callable[..., IAMCredentialsTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the iam credentials client. 
Args: @@ -553,13 +627,21 @@ def __init__(self, *, self._client_options = client_options_lib.from_dict(self._client_options) if self._client_options is None: self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = IAMCredentialsClient._read_environment_variables() - self._client_cert_source = IAMCredentialsClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = IAMCredentialsClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ( + IAMCredentialsClient._read_environment_variables() + ) + self._client_cert_source = IAMCredentialsClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = IAMCredentialsClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) self._api_endpoint: str = "" # updated below, depending on `transport` # Initialize the universe domain validation. @@ -571,7 +653,9 @@ def __init__(self, *, api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport @@ -580,30 +664,40 @@ def __init__(self, *, if transport_provided: # transport is a IAMCredentialsTransport instance. 
if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) if self._client_options.scopes: raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." + "When providing a transport instance, provide its scopes directly." ) self._transport = cast(IAMCredentialsTransport, transport) self._api_endpoint = self._transport.host - self._api_endpoint = (self._api_endpoint or - IAMCredentialsClient._get_api_endpoint( + self._api_endpoint = ( + self._api_endpoint + or IAMCredentialsClient._get_api_endpoint( self._client_options.api_endpoint, self._client_cert_source, self._universe_domain, - self._use_mtls_endpoint)) + self._use_mtls_endpoint, + ) + ) if not transport_provided: import google.auth._default # type: ignore - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) - transport_init: Union[Type[IAMCredentialsTransport], Callable[..., IAMCredentialsTransport]] = ( + transport_init: Union[ + Type[IAMCredentialsTransport], Callable[..., IAMCredentialsTransport] + ] = ( IAMCredentialsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., IAMCredentialsTransport], transport) @@ -622,31 +716,40 @@ def __init__(self, *, ) if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER _LOGGER.debug( "Created client 
`google.iam.credentials_v1.IAMCredentialsClient`.", - extra = { + extra={ "serviceName": "google.iam.credentials.v1.IAMCredentials", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { "serviceName": "google.iam.credentials.v1.IAMCredentials", "credentialsType": None, - } + }, ) - def generate_access_token(self, - request: Optional[Union[common.GenerateAccessTokenRequest, dict]] = None, - *, - name: Optional[str] = None, - delegates: Optional[MutableSequence[str]] = None, - scope: Optional[MutableSequence[str]] = None, - lifetime: Optional[duration_pb2.Duration] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> common.GenerateAccessTokenResponse: + def generate_access_token( + self, + request: Optional[Union[common.GenerateAccessTokenRequest, dict]] = None, + *, + name: Optional[str] = None, + delegates: Optional[MutableSequence[str]] = None, + scope: Optional[MutableSequence[str]] = None, + lifetime: Optional[duration_pb2.Duration] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> common.GenerateAccessTokenResponse: r"""Generates an OAuth 2.0 access token for a service account. 
@@ -747,10 +850,14 @@ def sample_generate_access_token(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name, delegates, scope, lifetime] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -774,9 +881,7 @@ def sample_generate_access_token(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -793,17 +898,18 @@ def sample_generate_access_token(): # Done; return the response. 
return response - def generate_id_token(self, - request: Optional[Union[common.GenerateIdTokenRequest, dict]] = None, - *, - name: Optional[str] = None, - delegates: Optional[MutableSequence[str]] = None, - audience: Optional[str] = None, - include_email: Optional[bool] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> common.GenerateIdTokenResponse: + def generate_id_token( + self, + request: Optional[Union[common.GenerateIdTokenRequest, dict]] = None, + *, + name: Optional[str] = None, + delegates: Optional[MutableSequence[str]] = None, + audience: Optional[str] = None, + include_email: Optional[bool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> common.GenerateIdTokenResponse: r"""Generates an OpenID Connect ID token for a service account. @@ -898,10 +1004,14 @@ def sample_generate_id_token(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name, delegates, audience, include_email] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -925,9 +1035,7 @@ def sample_generate_id_token(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -944,16 +1052,17 @@ def sample_generate_id_token(): # Done; return the response. return response - def sign_blob(self, - request: Optional[Union[common.SignBlobRequest, dict]] = None, - *, - name: Optional[str] = None, - delegates: Optional[MutableSequence[str]] = None, - payload: Optional[bytes] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> common.SignBlobResponse: + def sign_blob( + self, + request: Optional[Union[common.SignBlobRequest, dict]] = None, + *, + name: Optional[str] = None, + delegates: Optional[MutableSequence[str]] = None, + payload: Optional[bytes] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> common.SignBlobResponse: r"""Signs a blob using a service account's system-managed private key. @@ -1037,10 +1146,14 @@ def sample_sign_blob(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [name, delegates, payload] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1062,9 +1175,7 @@ def sample_sign_blob(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1081,16 +1192,17 @@ def sample_sign_blob(): # Done; return the response. return response - def sign_jwt(self, - request: Optional[Union[common.SignJwtRequest, dict]] = None, - *, - name: Optional[str] = None, - delegates: Optional[MutableSequence[str]] = None, - payload: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> common.SignJwtResponse: + def sign_jwt( + self, + request: Optional[Union[common.SignJwtRequest, dict]] = None, + *, + name: Optional[str] = None, + delegates: Optional[MutableSequence[str]] = None, + payload: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> common.SignJwtResponse: r"""Signs a JWT using a service account's system-managed private key. 
@@ -1177,10 +1289,14 @@ def sample_sign_jwt(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name, delegates, payload] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1202,9 +1318,7 @@ def sample_sign_jwt(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. 
@@ -1235,16 +1349,11 @@ def __exit__(self, type, value, traceback): self.transport.close() - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "IAMCredentialsClient", -) +__all__ = ("IAMCredentialsClient",) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py index 0214f01a40ac..897cc6ef3890 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py @@ -19,20 +19,18 @@ from .base import IAMCredentialsTransport from .grpc import IAMCredentialsGrpcTransport from .grpc_asyncio import IAMCredentialsGrpcAsyncIOTransport -from .rest import IAMCredentialsRestTransport -from .rest import IAMCredentialsRestInterceptor - +from .rest import IAMCredentialsRestInterceptor, IAMCredentialsRestTransport # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[IAMCredentialsTransport]] -_transport_registry['grpc'] = IAMCredentialsGrpcTransport -_transport_registry['grpc_asyncio'] = IAMCredentialsGrpcAsyncIOTransport -_transport_registry['rest'] = IAMCredentialsRestTransport +_transport_registry["grpc"] = IAMCredentialsGrpcTransport +_transport_registry["grpc_asyncio"] = IAMCredentialsGrpcAsyncIOTransport +_transport_registry["rest"] = IAMCredentialsRestTransport __all__ = ( - 'IAMCredentialsTransport', - 'IAMCredentialsGrpcTransport', - 'IAMCredentialsGrpcAsyncIOTransport', - 'IAMCredentialsRestTransport', - 'IAMCredentialsRestInterceptor', + "IAMCredentialsTransport", + "IAMCredentialsGrpcTransport", + "IAMCredentialsGrpcAsyncIOTransport", + "IAMCredentialsRestTransport", + "IAMCredentialsRestInterceptor", ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py index 44e4910533ac..035602c95ddc 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py @@ -16,20 +16,20 @@ import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -from google.iam.credentials_v1 import gapic_version as package_version - -import google.auth # type: ignore import google.api_core +import google.auth # type: ignore +import google.protobuf from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import 
google.protobuf - +from google.iam.credentials_v1 import gapic_version as package_version from google.iam.credentials_v1.types import common +from google.oauth2 import service_account # type: ignore -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ @@ -38,24 +38,23 @@ class IAMCredentialsTransport(abc.ABC): """Abstract transport class for IAMCredentials.""" - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - DEFAULT_HOST: str = 'iamcredentials.googleapis.com' + DEFAULT_HOST: str = "iamcredentials.googleapis.com" def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: """Instantiate the transport. Args: @@ -94,31 +93,43 @@ def __init__( # If no credentials are provided, then determine the appropriate # defaults. 
if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) if credentials_file is not None: credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - default_scopes=self.AUTH_SCOPES, - ) + credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(scopes=scopes, quota_project_id=quota_project_id, default_scopes=self.AUTH_SCOPES) + credentials, _ = google.auth.default( + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) # Don't apply audience if the credentials file passed from user. if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): credentials = credentials.with_always_use_jwt_access(True) # Save the credentials. self._credentials = credentials # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
- if ':' not in host: - host += ':443' + if ":" not in host: + host += ":443" self._host = host self._wrapped_methods: Dict[Callable, Callable] = {} @@ -190,51 +201,56 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - } + } def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! """ raise NotImplementedError() @property - def generate_access_token(self) -> Callable[ - [common.GenerateAccessTokenRequest], - Union[ - common.GenerateAccessTokenResponse, - Awaitable[common.GenerateAccessTokenResponse] - ]]: + def generate_access_token( + self, + ) -> Callable[ + [common.GenerateAccessTokenRequest], + Union[ + common.GenerateAccessTokenResponse, + Awaitable[common.GenerateAccessTokenResponse], + ], + ]: raise NotImplementedError() @property - def generate_id_token(self) -> Callable[ - [common.GenerateIdTokenRequest], - Union[ - common.GenerateIdTokenResponse, - Awaitable[common.GenerateIdTokenResponse] - ]]: + def generate_id_token( + self, + ) -> Callable[ + [common.GenerateIdTokenRequest], + Union[ + common.GenerateIdTokenResponse, Awaitable[common.GenerateIdTokenResponse] + ], + ]: raise NotImplementedError() @property - def sign_blob(self) -> Callable[ - [common.SignBlobRequest], - Union[ - common.SignBlobResponse, - Awaitable[common.SignBlobResponse] - ]]: + def sign_blob( + self, + ) -> Callable[ + [common.SignBlobRequest], + Union[common.SignBlobResponse, Awaitable[common.SignBlobResponse]], + ]: raise NotImplementedError() @property - def sign_jwt(self) -> Callable[ - [common.SignJwtRequest], - Union[ - common.SignJwtResponse, - Awaitable[common.SignJwtResponse] - ]]: + def sign_jwt( + self, + ) -> Callable[ + [common.SignJwtRequest], + 
Union[common.SignJwtResponse, Awaitable[common.SignJwtResponse]], + ]: raise NotImplementedError() @property @@ -242,6 +258,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ( - 'IAMCredentialsTransport', -) +__all__ = ("IAMCredentialsTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py index 5979ede85cae..7a31f7226ac1 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py @@ -19,22 +19,21 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson +import google.auth # type: ignore import google.protobuf.message - import grpc # type: ignore import proto # type: ignore - +from google.api_core import gapic_v1, grpc_helpers +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore from google.iam.credentials_v1.types import common -from .base import IAMCredentialsTransport, DEFAULT_CLIENT_INFO +from google.protobuf.json_format import MessageToJson + +from .base import DEFAULT_CLIENT_INFO, IAMCredentialsTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER 
CLIENT_LOGGING_SUPPORTED = False @@ -44,7 +43,9 @@ class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) if logging_enabled: # pragma: NO COVER request_metadata = client_call_details.metadata if isinstance(request, proto.Message): @@ -65,7 +66,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.iam.credentials.v1.IAMCredentials", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -76,7 +77,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) result = response.result() if isinstance(result, proto.Message): response_payload = type(result).to_json(result) @@ -91,7 +96,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Received response for {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.iam.credentials.v1.IAMCredentials", "rpcName": client_call_details.method, "response": grpc_response, @@ -122,23 +127,26 @@ class IAMCredentialsGrpcTransport(IAMCredentialsTransport): It sends protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. 
""" + _stubs: Dict[str, Callable] - def __init__(self, *, - host: str = 'iamcredentials.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "iamcredentials.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: @@ -265,19 +273,23 @@ def __init__(self, *, ) self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) # Wrap messages. 
This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod - def create_channel(cls, - host: str = 'iamcredentials.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: + def create_channel( + cls, + host: str = "iamcredentials.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: """Create and return a gRPC channel object. Args: host (Optional[str]): The host for the channel to use. @@ -313,19 +325,20 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property - def generate_access_token(self) -> Callable[ - [common.GenerateAccessTokenRequest], - common.GenerateAccessTokenResponse]: + def generate_access_token( + self, + ) -> Callable[ + [common.GenerateAccessTokenRequest], common.GenerateAccessTokenResponse + ]: r"""Return a callable for the generate access token method over gRPC. Generates an OAuth 2.0 access token for a service @@ -341,18 +354,18 @@ def generate_access_token(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'generate_access_token' not in self._stubs: - self._stubs['generate_access_token'] = self._logged_channel.unary_unary( - '/google.iam.credentials.v1.IAMCredentials/GenerateAccessToken', + if "generate_access_token" not in self._stubs: + self._stubs["generate_access_token"] = self._logged_channel.unary_unary( + "/google.iam.credentials.v1.IAMCredentials/GenerateAccessToken", request_serializer=common.GenerateAccessTokenRequest.serialize, response_deserializer=common.GenerateAccessTokenResponse.deserialize, ) - return self._stubs['generate_access_token'] + return self._stubs["generate_access_token"] @property - def generate_id_token(self) -> Callable[ - [common.GenerateIdTokenRequest], - common.GenerateIdTokenResponse]: + def generate_id_token( + self, + ) -> Callable[[common.GenerateIdTokenRequest], common.GenerateIdTokenResponse]: r"""Return a callable for the generate id token method over gRPC. Generates an OpenID Connect ID token for a service @@ -368,18 +381,16 @@ def generate_id_token(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'generate_id_token' not in self._stubs: - self._stubs['generate_id_token'] = self._logged_channel.unary_unary( - '/google.iam.credentials.v1.IAMCredentials/GenerateIdToken', + if "generate_id_token" not in self._stubs: + self._stubs["generate_id_token"] = self._logged_channel.unary_unary( + "/google.iam.credentials.v1.IAMCredentials/GenerateIdToken", request_serializer=common.GenerateIdTokenRequest.serialize, response_deserializer=common.GenerateIdTokenResponse.deserialize, ) - return self._stubs['generate_id_token'] + return self._stubs["generate_id_token"] @property - def sign_blob(self) -> Callable[ - [common.SignBlobRequest], - common.SignBlobResponse]: + def sign_blob(self) -> Callable[[common.SignBlobRequest], common.SignBlobResponse]: r"""Return a callable for the sign blob method over gRPC. 
Signs a blob using a service account's system-managed @@ -395,18 +406,16 @@ def sign_blob(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'sign_blob' not in self._stubs: - self._stubs['sign_blob'] = self._logged_channel.unary_unary( - '/google.iam.credentials.v1.IAMCredentials/SignBlob', + if "sign_blob" not in self._stubs: + self._stubs["sign_blob"] = self._logged_channel.unary_unary( + "/google.iam.credentials.v1.IAMCredentials/SignBlob", request_serializer=common.SignBlobRequest.serialize, response_deserializer=common.SignBlobResponse.deserialize, ) - return self._stubs['sign_blob'] + return self._stubs["sign_blob"] @property - def sign_jwt(self) -> Callable[ - [common.SignJwtRequest], - common.SignJwtResponse]: + def sign_jwt(self) -> Callable[[common.SignJwtRequest], common.SignJwtResponse]: r"""Return a callable for the sign jwt method over gRPC. Signs a JWT using a service account's system-managed @@ -422,13 +431,13 @@ def sign_jwt(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'sign_jwt' not in self._stubs: - self._stubs['sign_jwt'] = self._logged_channel.unary_unary( - '/google.iam.credentials.v1.IAMCredentials/SignJwt', + if "sign_jwt" not in self._stubs: + self._stubs["sign_jwt"] = self._logged_channel.unary_unary( + "/google.iam.credentials.v1.IAMCredentials/SignJwt", request_serializer=common.SignJwtRequest.serialize, response_deserializer=common.SignJwtResponse.deserialize, ) - return self._stubs['sign_jwt'] + return self._stubs["sign_jwt"] def close(self): self._logged_channel.close() @@ -438,6 +447,4 @@ def kind(self) -> str: return "grpc" -__all__ = ( - 'IAMCredentialsGrpcTransport', -) +__all__ = ("IAMCredentialsGrpcTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py index fb3658d72b8d..0fccb4baac80 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py @@ -15,30 +15,29 @@ # import inspect import json -import pickle import logging as std_logging +import pickle import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore 
from google.auth.transport.grpc import SslCredentials # type: ignore +from google.iam.credentials_v1.types import common from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore from grpc.experimental import aio # type: ignore -from google.iam.credentials_v1.types import common -from .base import IAMCredentialsTransport, DEFAULT_CLIENT_INFO +from .base import DEFAULT_CLIENT_INFO, IAMCredentialsTransport from .grpc import IAMCredentialsGrpcTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -46,9 +45,13 @@ _LOGGER = std_logging.getLogger(__name__) -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) if logging_enabled: # pragma: NO COVER request_metadata = client_call_details.metadata if isinstance(request, proto.Message): @@ -69,7 +72,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.iam.credentials.v1.IAMCredentials", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -80,7 +83,11 @@ async def intercept_unary_unary(self, continuation, client_call_details, request if logging_enabled: # pragma: NO COVER response_metadata = await response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if 
response_metadata else None + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) result = await response if isinstance(result, proto.Message): response_payload = type(result).to_json(result) @@ -95,7 +102,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Received response to rpc {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.iam.credentials.v1.IAMCredentials", "rpcName": str(client_call_details.method), "response": grpc_response, @@ -131,13 +138,15 @@ class IAMCredentialsGrpcAsyncIOTransport(IAMCredentialsTransport): _stubs: Dict[str, Callable] = {} @classmethod - def create_channel(cls, - host: str = 'iamcredentials.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: + def create_channel( + cls, + host: str = "iamcredentials.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: host (Optional[str]): The host for the channel to use. 
@@ -168,24 +177,26 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) - def __init__(self, *, - host: str = 'iamcredentials.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "iamcredentials.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. 
Args: @@ -314,7 +325,9 @@ def __init__(self, *, self._interceptor = _LoggingClientAIOInterceptor() self._grpc_channel._unary_unary_interceptors.append(self._interceptor) self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @@ -329,9 +342,12 @@ def grpc_channel(self) -> aio.Channel: return self._grpc_channel @property - def generate_access_token(self) -> Callable[ - [common.GenerateAccessTokenRequest], - Awaitable[common.GenerateAccessTokenResponse]]: + def generate_access_token( + self, + ) -> Callable[ + [common.GenerateAccessTokenRequest], + Awaitable[common.GenerateAccessTokenResponse], + ]: r"""Return a callable for the generate access token method over gRPC. Generates an OAuth 2.0 access token for a service @@ -347,18 +363,20 @@ def generate_access_token(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'generate_access_token' not in self._stubs: - self._stubs['generate_access_token'] = self._logged_channel.unary_unary( - '/google.iam.credentials.v1.IAMCredentials/GenerateAccessToken', + if "generate_access_token" not in self._stubs: + self._stubs["generate_access_token"] = self._logged_channel.unary_unary( + "/google.iam.credentials.v1.IAMCredentials/GenerateAccessToken", request_serializer=common.GenerateAccessTokenRequest.serialize, response_deserializer=common.GenerateAccessTokenResponse.deserialize, ) - return self._stubs['generate_access_token'] + return self._stubs["generate_access_token"] @property - def generate_id_token(self) -> Callable[ - [common.GenerateIdTokenRequest], - Awaitable[common.GenerateIdTokenResponse]]: + def generate_id_token( + self, + ) -> Callable[ + [common.GenerateIdTokenRequest], Awaitable[common.GenerateIdTokenResponse] + ]: r"""Return a callable for the generate id token method over gRPC. Generates an OpenID Connect ID token for a service @@ -374,18 +392,18 @@ def generate_id_token(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'generate_id_token' not in self._stubs: - self._stubs['generate_id_token'] = self._logged_channel.unary_unary( - '/google.iam.credentials.v1.IAMCredentials/GenerateIdToken', + if "generate_id_token" not in self._stubs: + self._stubs["generate_id_token"] = self._logged_channel.unary_unary( + "/google.iam.credentials.v1.IAMCredentials/GenerateIdToken", request_serializer=common.GenerateIdTokenRequest.serialize, response_deserializer=common.GenerateIdTokenResponse.deserialize, ) - return self._stubs['generate_id_token'] + return self._stubs["generate_id_token"] @property - def sign_blob(self) -> Callable[ - [common.SignBlobRequest], - Awaitable[common.SignBlobResponse]]: + def sign_blob( + self, + ) -> Callable[[common.SignBlobRequest], Awaitable[common.SignBlobResponse]]: r"""Return a callable for the sign blob method over gRPC. Signs a blob using a service account's system-managed @@ -401,18 +419,18 @@ def sign_blob(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'sign_blob' not in self._stubs: - self._stubs['sign_blob'] = self._logged_channel.unary_unary( - '/google.iam.credentials.v1.IAMCredentials/SignBlob', + if "sign_blob" not in self._stubs: + self._stubs["sign_blob"] = self._logged_channel.unary_unary( + "/google.iam.credentials.v1.IAMCredentials/SignBlob", request_serializer=common.SignBlobRequest.serialize, response_deserializer=common.SignBlobResponse.deserialize, ) - return self._stubs['sign_blob'] + return self._stubs["sign_blob"] @property - def sign_jwt(self) -> Callable[ - [common.SignJwtRequest], - Awaitable[common.SignJwtResponse]]: + def sign_jwt( + self, + ) -> Callable[[common.SignJwtRequest], Awaitable[common.SignJwtResponse]]: r"""Return a callable for the sign jwt method over gRPC. Signs a JWT using a service account's system-managed @@ -428,16 +446,16 @@ def sign_jwt(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'sign_jwt' not in self._stubs: - self._stubs['sign_jwt'] = self._logged_channel.unary_unary( - '/google.iam.credentials.v1.IAMCredentials/SignJwt', + if "sign_jwt" not in self._stubs: + self._stubs["sign_jwt"] = self._logged_channel.unary_unary( + "/google.iam.credentials.v1.IAMCredentials/SignJwt", request_serializer=common.SignJwtRequest.serialize, response_deserializer=common.SignJwtResponse.deserialize, ) - return self._stubs['sign_jwt'] + return self._stubs["sign_jwt"] def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { self.generate_access_token: self._wrap_method( self.generate_access_token, @@ -514,6 +532,4 @@ def kind(self) -> str: return "grpc_asyncio" -__all__ = ( - 'IAMCredentialsGrpcAsyncIOTransport', -) +__all__ = ("IAMCredentialsGrpcAsyncIOTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py index b9aa3c4b6f94..ca2154ad578e 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py @@ -13,31 +13,24 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import logging +import dataclasses import json # type: ignore +import logging +import warnings +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore +import google.protobuf from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, rest_helpers, rest_streaming from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 -import google.protobuf - +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.iam.credentials_v1.types import common from google.protobuf import json_format - from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -from google.iam.credentials_v1.types import common - - -from .rest_base import _BaseIAMCredentialsRestTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseIAMCredentialsRestTransport try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -46,6 +39,7 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -114,7 +108,14 @@ def post_sign_jwt(self, response): """ - def pre_generate_access_token(self, request: common.GenerateAccessTokenRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.GenerateAccessTokenRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + + def pre_generate_access_token( + self, + request: common.GenerateAccessTokenRequest, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + common.GenerateAccessTokenRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for generate_access_token Override in a subclass to manipulate the request or metadata @@ -122,7 +123,9 @@ def pre_generate_access_token(self, request: common.GenerateAccessTokenRequest, """ return request, metadata - def post_generate_access_token(self, response: common.GenerateAccessTokenResponse) -> common.GenerateAccessTokenResponse: + def post_generate_access_token( + self, response: common.GenerateAccessTokenResponse + ) -> common.GenerateAccessTokenResponse: """Post-rpc interceptor for generate_access_token DEPRECATED. Please use the `post_generate_access_token_with_metadata` @@ -135,7 +138,13 @@ def post_generate_access_token(self, response: common.GenerateAccessTokenRespons """ return response - def post_generate_access_token_with_metadata(self, response: common.GenerateAccessTokenResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.GenerateAccessTokenResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_generate_access_token_with_metadata( + self, + response: common.GenerateAccessTokenResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + common.GenerateAccessTokenResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Post-rpc interceptor for generate_access_token Override in a subclass to read or manipulate the response or metadata after it @@ -150,7 +159,11 @@ def post_generate_access_token_with_metadata(self, response: common.GenerateAcce """ return response, metadata - def pre_generate_id_token(self, request: common.GenerateIdTokenRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.GenerateIdTokenRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_generate_id_token( + self, + request: common.GenerateIdTokenRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> 
Tuple[common.GenerateIdTokenRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for generate_id_token Override in a subclass to manipulate the request or metadata @@ -158,7 +171,9 @@ def pre_generate_id_token(self, request: common.GenerateIdTokenRequest, metadata """ return request, metadata - def post_generate_id_token(self, response: common.GenerateIdTokenResponse) -> common.GenerateIdTokenResponse: + def post_generate_id_token( + self, response: common.GenerateIdTokenResponse + ) -> common.GenerateIdTokenResponse: """Post-rpc interceptor for generate_id_token DEPRECATED. Please use the `post_generate_id_token_with_metadata` @@ -171,7 +186,11 @@ def post_generate_id_token(self, response: common.GenerateIdTokenResponse) -> co """ return response - def post_generate_id_token_with_metadata(self, response: common.GenerateIdTokenResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.GenerateIdTokenResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_generate_id_token_with_metadata( + self, + response: common.GenerateIdTokenResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common.GenerateIdTokenResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for generate_id_token Override in a subclass to read or manipulate the response or metadata after it @@ -186,7 +205,11 @@ def post_generate_id_token_with_metadata(self, response: common.GenerateIdTokenR """ return response, metadata - def pre_sign_blob(self, request: common.SignBlobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.SignBlobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_sign_blob( + self, + request: common.SignBlobRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common.SignBlobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for sign_blob Override in a subclass to manipulate the request or metadata @@ -194,7 +217,9 @@ def 
pre_sign_blob(self, request: common.SignBlobRequest, metadata: Sequence[Tupl """ return request, metadata - def post_sign_blob(self, response: common.SignBlobResponse) -> common.SignBlobResponse: + def post_sign_blob( + self, response: common.SignBlobResponse + ) -> common.SignBlobResponse: """Post-rpc interceptor for sign_blob DEPRECATED. Please use the `post_sign_blob_with_metadata` @@ -207,7 +232,11 @@ def post_sign_blob(self, response: common.SignBlobResponse) -> common.SignBlobRe """ return response - def post_sign_blob_with_metadata(self, response: common.SignBlobResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.SignBlobResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_sign_blob_with_metadata( + self, + response: common.SignBlobResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common.SignBlobResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for sign_blob Override in a subclass to read or manipulate the response or metadata after it @@ -222,7 +251,11 @@ def post_sign_blob_with_metadata(self, response: common.SignBlobResponse, metada """ return response, metadata - def pre_sign_jwt(self, request: common.SignJwtRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.SignJwtRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_sign_jwt( + self, + request: common.SignJwtRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common.SignJwtRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for sign_jwt Override in a subclass to manipulate the request or metadata @@ -243,7 +276,11 @@ def post_sign_jwt(self, response: common.SignJwtResponse) -> common.SignJwtRespo """ return response - def post_sign_jwt_with_metadata(self, response: common.SignJwtResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.SignJwtResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def 
post_sign_jwt_with_metadata( + self, + response: common.SignJwtResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common.SignJwtResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for sign_jwt Override in a subclass to read or manipulate the response or metadata after it @@ -287,62 +324,63 @@ class IAMCredentialsRestTransport(_BaseIAMCredentialsRestTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, - host: str = 'iamcredentials.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[IAMCredentialsRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "iamcredentials.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[IAMCredentialsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via a GitHub issue in - this library's repository. Thank you! - - Args: - host (Optional[str]): - The hostname to connect to (default: 'iamcredentials.googleapis.com'). 
- credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. This argument will be - removed in the next major version of this library. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - interceptor (Optional[IAMCredentialsRestInterceptor]): Interceptor used - to manipulate requests, request metadata, and responses. - api_audience (Optional[str]): The intended audience for the API calls - to the service that will be set when using certain 3rd party - authentication flows. Audience is typically a resource identifier. - If not set, the host value will be used as a default. + NOTE: This REST transport functionality is currently in a beta + state (preview). 
We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to (default: 'iamcredentials.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + interceptor (Optional[IAMCredentialsRestInterceptor]): Interceptor used + to manipulate requests, request metadata, and responses. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. 
+ If not set, the host value will be used as a default. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -354,16 +392,20 @@ def __init__(self, *, client_info=client_info, always_use_jwt_access=always_use_jwt_access, url_scheme=url_scheme, - api_audience=api_audience + api_audience=api_audience, ) self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) + self._credentials, default_host=self.DEFAULT_HOST + ) if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) self._interceptor = interceptor or IAMCredentialsRestInterceptor() self._prep_wrapped_messages(client_info) - class _GenerateAccessToken(_BaseIAMCredentialsRestTransport._BaseGenerateAccessToken, IAMCredentialsRestStub): + class _GenerateAccessToken( + _BaseIAMCredentialsRestTransport._BaseGenerateAccessToken, + IAMCredentialsRestStub, + ): def __hash__(self): return hash("IAMCredentialsRestTransport.GenerateAccessToken") @@ -375,27 +417,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: common.GenerateAccessTokenRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> common.GenerateAccessTokenResponse: + def __call__( + self, + request: common.GenerateAccessTokenRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + 
timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> common.GenerateAccessTokenResponse: r"""Call the generate access token method over HTTP. Args: @@ -416,30 +460,42 @@ def __call__(self, http_options = _BaseIAMCredentialsRestTransport._BaseGenerateAccessToken._get_http_options() - request, metadata = self._interceptor.pre_generate_access_token(request, metadata) - transcoded_request = _BaseIAMCredentialsRestTransport._BaseGenerateAccessToken._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_generate_access_token( + request, metadata + ) + transcoded_request = _BaseIAMCredentialsRestTransport._BaseGenerateAccessToken._get_transcoded_request( + http_options, request + ) - body = _BaseIAMCredentialsRestTransport._BaseGenerateAccessToken._get_request_body_json(transcoded_request) + body = _BaseIAMCredentialsRestTransport._BaseGenerateAccessToken._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseIAMCredentialsRestTransport._BaseGenerateAccessToken._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseIAMCredentialsRestTransport._BaseGenerateAccessToken._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": 
request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.iam.credentials_v1.IAMCredentialsClient.GenerateAccessToken", - extra = { + extra={ "serviceName": "google.iam.credentials.v1.IAMCredentials", "rpcName": "GenerateAccessToken", "httpRequest": http_request, @@ -448,7 +504,15 @@ def __call__(self, ) # Send the request - response = IAMCredentialsRestTransport._GenerateAccessToken._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = IAMCredentialsRestTransport._GenerateAccessToken._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -463,20 +527,26 @@ def __call__(self, resp = self._interceptor.post_generate_access_token(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_generate_access_token_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_generate_access_token_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = common.GenerateAccessTokenResponse.to_json(response) + response_payload = common.GenerateAccessTokenResponse.to_json( + response + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.iam.credentials_v1.IAMCredentialsClient.generate_access_token", - extra = { + extra={ "serviceName": "google.iam.credentials.v1.IAMCredentials", "rpcName": "GenerateAccessToken", 
"metadata": http_response["headers"], @@ -485,7 +555,9 @@ def __call__(self, ) return resp - class _GenerateIdToken(_BaseIAMCredentialsRestTransport._BaseGenerateIdToken, IAMCredentialsRestStub): + class _GenerateIdToken( + _BaseIAMCredentialsRestTransport._BaseGenerateIdToken, IAMCredentialsRestStub + ): def __hash__(self): return hash("IAMCredentialsRestTransport.GenerateIdToken") @@ -497,27 +569,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: common.GenerateIdTokenRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> common.GenerateIdTokenResponse: + def __call__( + self, + request: common.GenerateIdTokenRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> common.GenerateIdTokenResponse: r"""Call the generate id token method over HTTP. 
Args: @@ -538,30 +612,42 @@ def __call__(self, http_options = _BaseIAMCredentialsRestTransport._BaseGenerateIdToken._get_http_options() - request, metadata = self._interceptor.pre_generate_id_token(request, metadata) - transcoded_request = _BaseIAMCredentialsRestTransport._BaseGenerateIdToken._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_generate_id_token( + request, metadata + ) + transcoded_request = _BaseIAMCredentialsRestTransport._BaseGenerateIdToken._get_transcoded_request( + http_options, request + ) - body = _BaseIAMCredentialsRestTransport._BaseGenerateIdToken._get_request_body_json(transcoded_request) + body = _BaseIAMCredentialsRestTransport._BaseGenerateIdToken._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseIAMCredentialsRestTransport._BaseGenerateIdToken._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseIAMCredentialsRestTransport._BaseGenerateIdToken._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.iam.credentials_v1.IAMCredentialsClient.GenerateIdToken", - extra = { + extra={ "serviceName": "google.iam.credentials.v1.IAMCredentials", 
"rpcName": "GenerateIdToken", "httpRequest": http_request, @@ -570,7 +656,15 @@ def __call__(self, ) # Send the request - response = IAMCredentialsRestTransport._GenerateIdToken._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = IAMCredentialsRestTransport._GenerateIdToken._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -585,20 +679,24 @@ def __call__(self, resp = self._interceptor.post_generate_id_token(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_generate_id_token_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_generate_id_token_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = common.GenerateIdTokenResponse.to_json(response) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.iam.credentials_v1.IAMCredentialsClient.generate_id_token", - extra = { + extra={ "serviceName": "google.iam.credentials.v1.IAMCredentials", "rpcName": "GenerateIdToken", "metadata": http_response["headers"], @@ -607,7 +705,9 @@ def __call__(self, ) return resp - class _SignBlob(_BaseIAMCredentialsRestTransport._BaseSignBlob, IAMCredentialsRestStub): + class _SignBlob( + _BaseIAMCredentialsRestTransport._BaseSignBlob, IAMCredentialsRestStub + ): def __hash__(self): return hash("IAMCredentialsRestTransport.SignBlob") @@ 
-619,27 +719,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: common.SignBlobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> common.SignBlobResponse: + def __call__( + self, + request: common.SignBlobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> common.SignBlobResponse: r"""Call the sign blob method over HTTP. 
Args: @@ -658,32 +760,50 @@ def __call__(self, """ - http_options = _BaseIAMCredentialsRestTransport._BaseSignBlob._get_http_options() + http_options = ( + _BaseIAMCredentialsRestTransport._BaseSignBlob._get_http_options() + ) request, metadata = self._interceptor.pre_sign_blob(request, metadata) - transcoded_request = _BaseIAMCredentialsRestTransport._BaseSignBlob._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseIAMCredentialsRestTransport._BaseSignBlob._get_transcoded_request( + http_options, request + ) + ) - body = _BaseIAMCredentialsRestTransport._BaseSignBlob._get_request_body_json(transcoded_request) + body = ( + _BaseIAMCredentialsRestTransport._BaseSignBlob._get_request_body_json( + transcoded_request + ) + ) # Jsonify the query params - query_params = _BaseIAMCredentialsRestTransport._BaseSignBlob._get_query_params_json(transcoded_request) + query_params = ( + _BaseIAMCredentialsRestTransport._BaseSignBlob._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.iam.credentials_v1.IAMCredentialsClient.SignBlob", - extra = { + extra={ "serviceName": "google.iam.credentials.v1.IAMCredentials", "rpcName": "SignBlob", 
"httpRequest": http_request, @@ -692,7 +812,15 @@ def __call__(self, ) # Send the request - response = IAMCredentialsRestTransport._SignBlob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = IAMCredentialsRestTransport._SignBlob._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -707,20 +835,24 @@ def __call__(self, resp = self._interceptor.post_sign_blob(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_sign_blob_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_sign_blob_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = common.SignBlobResponse.to_json(response) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.iam.credentials_v1.IAMCredentialsClient.sign_blob", - extra = { + extra={ "serviceName": "google.iam.credentials.v1.IAMCredentials", "rpcName": "SignBlob", "metadata": http_response["headers"], @@ -729,7 +861,9 @@ def __call__(self, ) return resp - class _SignJwt(_BaseIAMCredentialsRestTransport._BaseSignJwt, IAMCredentialsRestStub): + class _SignJwt( + _BaseIAMCredentialsRestTransport._BaseSignJwt, IAMCredentialsRestStub + ): def __hash__(self): return hash("IAMCredentialsRestTransport.SignJwt") @@ -741,27 +875,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = 
transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: common.SignJwtRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> common.SignJwtResponse: + def __call__( + self, + request: common.SignJwtRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> common.SignJwtResponse: r"""Call the sign jwt method over HTTP. Args: @@ -780,32 +916,48 @@ def __call__(self, """ - http_options = _BaseIAMCredentialsRestTransport._BaseSignJwt._get_http_options() + http_options = ( + _BaseIAMCredentialsRestTransport._BaseSignJwt._get_http_options() + ) request, metadata = self._interceptor.pre_sign_jwt(request, metadata) - transcoded_request = _BaseIAMCredentialsRestTransport._BaseSignJwt._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseIAMCredentialsRestTransport._BaseSignJwt._get_transcoded_request( + http_options, request + ) + ) - body = _BaseIAMCredentialsRestTransport._BaseSignJwt._get_request_body_json(transcoded_request) + body = _BaseIAMCredentialsRestTransport._BaseSignJwt._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseIAMCredentialsRestTransport._BaseSignJwt._get_query_params_json(transcoded_request) + query_params = ( + _BaseIAMCredentialsRestTransport._BaseSignJwt._get_query_params_json( + transcoded_request + ) + ) - if 
CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.iam.credentials_v1.IAMCredentialsClient.SignJwt", - extra = { + extra={ "serviceName": "google.iam.credentials.v1.IAMCredentials", "rpcName": "SignJwt", "httpRequest": http_request, @@ -814,7 +966,15 @@ def __call__(self, ) # Send the request - response = IAMCredentialsRestTransport._SignJwt._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = IAMCredentialsRestTransport._SignJwt._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -829,20 +989,24 @@ def __call__(self, resp = self._interceptor.post_sign_jwt(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_sign_jwt_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_sign_jwt_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = common.SignJwtResponse.to_json(response) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.iam.credentials_v1.IAMCredentialsClient.sign_jwt", - extra = { + extra={ "serviceName": "google.iam.credentials.v1.IAMCredentials", "rpcName": "SignJwt", "metadata": http_response["headers"], @@ -852,36 +1016,34 @@ def __call__(self, return resp @property - def generate_access_token(self) -> Callable[ - [common.GenerateAccessTokenRequest], - common.GenerateAccessTokenResponse]: + def generate_access_token( + self, + ) -> Callable[ + [common.GenerateAccessTokenRequest], common.GenerateAccessTokenResponse + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._GenerateAccessToken(self._session, self._host, self._interceptor) # type: ignore + return self._GenerateAccessToken(self._session, self._host, self._interceptor) # type: ignore @property - def generate_id_token(self) -> Callable[ - [common.GenerateIdTokenRequest], - common.GenerateIdTokenResponse]: + def generate_id_token( + self, + ) -> Callable[[common.GenerateIdTokenRequest], common.GenerateIdTokenResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GenerateIdToken(self._session, self._host, self._interceptor) # type: ignore + return self._GenerateIdToken(self._session, self._host, self._interceptor) # type: ignore @property - def sign_blob(self) -> Callable[ - [common.SignBlobRequest], - common.SignBlobResponse]: + def sign_blob(self) -> Callable[[common.SignBlobRequest], common.SignBlobResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._SignBlob(self._session, self._host, self._interceptor) # type: ignore + return self._SignBlob(self._session, self._host, self._interceptor) # type: ignore @property - def sign_jwt(self) -> Callable[ - [common.SignJwtRequest], - common.SignJwtResponse]: + def sign_jwt(self) -> Callable[[common.SignJwtRequest], common.SignJwtResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._SignJwt(self._session, self._host, self._interceptor) # type: ignore + return self._SignJwt(self._session, self._host, self._interceptor) # type: ignore @property def kind(self) -> str: @@ -891,6 +1053,4 @@ def close(self): self._session.close() -__all__=( - 'IAMCredentialsRestTransport', -) +__all__ = ("IAMCredentialsRestTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest_base.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest_base.py index a03907410f78..3dec23e9b534 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest_base.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest_base.py @@ -14,17 +14,14 @@ # limitations under the License. 
# import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from .base import IAMCredentialsTransport, DEFAULT_CLIENT_INFO - import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - +from google.api_core import gapic_v1, path_template from google.iam.credentials_v1.types import common +from google.protobuf import json_format + +from .base import DEFAULT_CLIENT_INFO, IAMCredentialsTransport class _BaseIAMCredentialsRestTransport(IAMCredentialsTransport): @@ -40,14 +37,16 @@ class _BaseIAMCredentialsRestTransport(IAMCredentialsTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, - host: str = 'iamcredentials.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "iamcredentials.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. 
Args: host (Optional[str]): @@ -71,7 +70,9 @@ def __init__(self, *, # Run the base constructor maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER url_match_items = maybe_url_match.groupdict() @@ -82,27 +83,31 @@ def __init__(self, *, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience + api_audience=api_audience, ) class _BaseGenerateAccessToken: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/serviceAccounts/*}:generateAccessToken', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/serviceAccounts/*}:generateAccessToken", + "body": "*", + }, ] return http_options @@ -117,17 +122,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - 
query_params.update(_BaseIAMCredentialsRestTransport._BaseGenerateAccessToken._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseIAMCredentialsRestTransport._BaseGenerateAccessToken._get_unset_required_fields( + query_params + ) + ) return query_params @@ -135,20 +146,24 @@ class _BaseGenerateIdToken: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/serviceAccounts/*}:generateIdToken', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/serviceAccounts/*}:generateIdToken", + "body": "*", + }, ] return http_options @@ -163,17 +178,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseIAMCredentialsRestTransport._BaseGenerateIdToken._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + 
use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseIAMCredentialsRestTransport._BaseGenerateIdToken._get_unset_required_fields( + query_params + ) + ) return query_params @@ -181,20 +202,24 @@ class _BaseSignBlob: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/serviceAccounts/*}:signBlob', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/serviceAccounts/*}:signBlob", + "body": "*", + }, ] return http_options @@ -209,17 +234,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseIAMCredentialsRestTransport._BaseSignBlob._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseIAMCredentialsRestTransport._BaseSignBlob._get_unset_required_fields( + query_params + ) + ) return query_params @@ -227,20 +258,24 @@ class _BaseSignJwt: def __hash__(self): # pragma: NO COVER 
return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/serviceAccounts/*}:signJwt', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/serviceAccounts/*}:signJwt", + "body": "*", + }, ] return http_options @@ -255,21 +290,25 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseIAMCredentialsRestTransport._BaseSignJwt._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseIAMCredentialsRestTransport._BaseSignJwt._get_unset_required_fields( + query_params + ) + ) return query_params -__all__=( - '_BaseIAMCredentialsRestTransport', -) +__all__ = ("_BaseIAMCredentialsRestTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py index 
f257b889924c..e3846fefa8fb 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py @@ -25,12 +25,12 @@ ) __all__ = ( - 'GenerateAccessTokenRequest', - 'GenerateAccessTokenResponse', - 'GenerateIdTokenRequest', - 'GenerateIdTokenResponse', - 'SignBlobRequest', - 'SignBlobResponse', - 'SignJwtRequest', - 'SignJwtResponse', + "GenerateAccessTokenRequest", + "GenerateAccessTokenResponse", + "GenerateIdTokenRequest", + "GenerateIdTokenResponse", + "SignBlobRequest", + "SignBlobResponse", + "SignJwtRequest", + "SignJwtResponse", ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py index 1acdee2180bc..f2f42dd93afe 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py @@ -17,23 +17,21 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - import google.protobuf.duration_pb2 as duration_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore - +import proto # type: ignore __protobuf__ = proto.module( - package='google.iam.credentials.v1', + package="google.iam.credentials.v1", manifest={ - 'GenerateAccessTokenRequest', - 'GenerateAccessTokenResponse', - 'SignBlobRequest', - 'SignBlobResponse', - 'SignJwtRequest', - 'SignJwtResponse', - 'GenerateIdTokenRequest', - 'GenerateIdTokenResponse', + "GenerateAccessTokenRequest", + "GenerateAccessTokenResponse", + "SignBlobRequest", + "SignBlobResponse", + "SignJwtRequest", + "SignJwtResponse", + "GenerateIdTokenRequest", + "GenerateIdTokenResponse", }, ) 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py index 17d048bd5d33..1616a02036f5 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py @@ -15,11 +15,9 @@ # import proto # type: ignore - __protobuf__ = proto.module( - package='google.iam.credentials.v1', - manifest={ - }, + package="google.iam.credentials.v1", + manifest={}, ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index a614b73d8480..b6fd85fdf130 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -17,9 +17,8 @@ import pathlib import re import shutil - -from typing import Dict, List import warnings +from typing import Dict, List import nox @@ -154,7 +153,8 @@ def lint(session): # 2. Check formatting session.run( - "ruff", "format", + "ruff", + "format", "--check", f"--target-version=py{ALL_PYTHON[0].replace('.', '')}", "--line-length=88", @@ -167,12 +167,15 @@ def lint(session): @nox.session(python=DEFAULT_PYTHON_VERSION) def blacken(session): """(Deprecated) Legacy session. Please use 'nox -s format'.""" - session.log("WARNING: The 'blacken' session is deprecated and will be removed in a future release. Please use 'nox -s format' in the future.") + session.log( + "WARNING: The 'blacken' session is deprecated and will be removed in a future release. Please use 'nox -s format' in the future." 
+ ) # Just run the ruff formatter (keeping legacy behavior of only formatting, not sorting imports) session.install(RUFF_VERSION) session.run( - "ruff", "format", + "ruff", + "format", f"--target-version=py{ALL_PYTHON[0].replace('.', '')}", "--line-length=88", *LINT_PATHS, @@ -191,8 +194,10 @@ def format(session): # check --select I: Enables strict import sorting # --fix: Applies the changes automatically session.run( - "ruff", "check", - "--select", "I", + "ruff", + "check", + "--select", + "I", "--fix", f"--target-version=py{ALL_PYTHON[0].replace('.', '')}", "--line-length=88", # Standard Black line length @@ -201,7 +206,8 @@ def format(session): # 3. Run Ruff to format code session.run( - "ruff", "format", + "ruff", + "format", f"--target-version=py{ALL_PYTHON[0].replace('.', '')}", "--line-length=88", # Standard Black line length *LINT_PATHS, @@ -386,8 +392,10 @@ def docs(session): "sphinx-build", "-T", # show full traceback on exception "-N", # no colors - "-b", "html", # builder - "-d", os.path.join("docs", "_build", "doctrees", ""), # cache directory + "-b", + "html", # builder + "-d", + os.path.join("docs", "_build", "doctrees", ""), # cache directory # paths to build: os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py index 23cdbcb65d98..05017303872a 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py @@ -41,7 
+41,7 @@ async def sample_generate_access_token(): # Initialize request argument(s) request = credentials_v1.GenerateAccessTokenRequest( name="name_value", - scope=['scope_value1', 'scope_value2'], + scope=["scope_value1", "scope_value2"], ) # Make the request @@ -50,4 +50,5 @@ async def sample_generate_access_token(): # Handle the response print(response) + # [END iamcredentials_v1_generated_IAMCredentials_GenerateAccessToken_async] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py index b7890bd0106e..045e84d0e6e5 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py @@ -41,7 +41,7 @@ def sample_generate_access_token(): # Initialize request argument(s) request = credentials_v1.GenerateAccessTokenRequest( name="name_value", - scope=['scope_value1', 'scope_value2'], + scope=["scope_value1", "scope_value2"], ) # Make the request @@ -50,4 +50,5 @@ def sample_generate_access_token(): # Handle the response print(response) + # [END iamcredentials_v1_generated_IAMCredentials_GenerateAccessToken_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py index 0215ccea7f69..6a7bf6b56b4d 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py @@ -50,4 +50,5 @@ async def sample_generate_id_token(): # Handle the response print(response) + # [END iamcredentials_v1_generated_IAMCredentials_GenerateIdToken_async] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py index cf2199eae2ca..664b7794a48e 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py @@ -50,4 +50,5 @@ def sample_generate_id_token(): # Handle the response print(response) + # [END iamcredentials_v1_generated_IAMCredentials_GenerateIdToken_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_async.py index 9dd1113be682..19218118f134 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_async.py +++ 
b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_async.py @@ -41,7 +41,7 @@ async def sample_sign_blob(): # Initialize request argument(s) request = credentials_v1.SignBlobRequest( name="name_value", - payload=b'payload_blob', + payload=b"payload_blob", ) # Make the request @@ -50,4 +50,5 @@ async def sample_sign_blob(): # Handle the response print(response) + # [END iamcredentials_v1_generated_IAMCredentials_SignBlob_async] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py index 62752da45472..715257d344a9 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py @@ -41,7 +41,7 @@ def sample_sign_blob(): # Initialize request argument(s) request = credentials_v1.SignBlobRequest( name="name_value", - payload=b'payload_blob', + payload=b"payload_blob", ) # Make the request @@ -50,4 +50,5 @@ def sample_sign_blob(): # Handle the response print(response) + # [END iamcredentials_v1_generated_IAMCredentials_SignBlob_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py index 3022b82a96d2..5a01a1bfaf3c 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py @@ -50,4 +50,5 @@ async def sample_sign_jwt(): # Handle the response print(response) + # [END iamcredentials_v1_generated_IAMCredentials_SignJwt_async] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py index 3c6faea9b194..41115a20f45f 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py @@ -50,4 +50,5 @@ def sample_sign_jwt(): # Handle the response print(response) + # [END iamcredentials_v1_generated_IAMCredentials_SignJwt_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index 66af125ed53b..9e1462704bf9 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -17,20 +17,20 @@ import os import re -import setuptools # type: ignore +import setuptools # type: ignore package_root = os.path.abspath(os.path.dirname(__file__)) -name = 'google-iam-credentials' +name = "google-iam-credentials" description = "Google Iam Credentials API client library" version = None -with open(os.path.join(package_root, 
'google/iam/credentials/gapic_version.py')) as fp: +with open(os.path.join(package_root, "google/iam/credentials/gapic_version.py")) as fp: version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) + assert len(version_candidates) == 1 version = version_candidates[0] if version[0] == "0": @@ -49,8 +49,7 @@ "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", "protobuf >= 4.25.8, < 8.0.0", ] -extras = { -} +extras = {} url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-iam-credentials" package_root = os.path.abspath(os.path.dirname(__file__)) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/__init__.py index 191773d5572d..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/__init__.py index 191773d5572d..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/__init__.py index 191773d5572d..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/__init__.py +++ 
b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/__init__.py index 191773d5572d..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index fa2116cdd719..74657c3e2a6b 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -14,6 +14,7 @@ # limitations under the License. 
# import os + # try/except added for compatibility with python < 3.8 try: from unittest import mock @@ -21,47 +22,48 @@ except ImportError: # pragma: NO COVER import mock -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format import json import math +from collections.abc import AsyncIterable, Iterable, Mapping, Sequence + +import grpc import pytest -from collections.abc import Sequence, Mapping from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule +from google.protobuf import json_format +from grpc.experimental import aio from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from requests import PreparedRequest, Request, Response from requests.sessions import Session -from google.protobuf import json_format try: from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER +except ImportError: # pragma: NO COVER HAS_GOOGLE_AUTH_AIO = False -from google.api_core import client_options +import google.auth +import google.protobuf.duration_pb2 as duration_pb2 # type: ignore +import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore +from google.api_core import ( + client_options, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + path_template, +) from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.iam.credentials_v1.services.iam_credentials import IAMCredentialsAsyncClient 
-from google.iam.credentials_v1.services.iam_credentials import IAMCredentialsClient -from google.iam.credentials_v1.services.iam_credentials import transports +from google.iam.credentials_v1.services.iam_credentials import ( + IAMCredentialsAsyncClient, + IAMCredentialsClient, + transports, +) from google.iam.credentials_v1.types import common from google.oauth2 import service_account -import google.auth -import google.protobuf.duration_pb2 as duration_pb2 # type: ignore -import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore - - CRED_INFO_JSON = { "credential_source": "/path/to/file", @@ -76,9 +78,11 @@ async def mock_async_gen(data, chunk_size=1): chunk = data[i : i + chunk_size] yield chunk.encode("utf-8") + def client_cert_source_callback(): return b"cert bytes", b"key bytes" + # TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. # See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. def async_anonymous_credentials(): @@ -86,17 +90,27 @@ def async_anonymous_credentials(): return ga_credentials_async.AnonymousCredentials() return ga_credentials.AnonymousCredentials() + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + # If default endpoint template is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint template so the client can produce a different # mtls endpoint for endpoint testing purposes. 
def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) def test__get_default_mtls_endpoint(): @@ -108,21 +122,47 @@ def test__get_default_mtls_endpoint(): custom_endpoint = ".custom" assert IAMCredentialsClient._get_default_mtls_endpoint(None) is None - assert IAMCredentialsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert IAMCredentialsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert IAMCredentialsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert IAMCredentialsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert IAMCredentialsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - assert IAMCredentialsClient._get_default_mtls_endpoint(custom_endpoint) == custom_endpoint + assert ( + IAMCredentialsClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + IAMCredentialsClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + IAMCredentialsClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + IAMCredentialsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + IAMCredentialsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + assert ( + IAMCredentialsClient._get_default_mtls_endpoint(custom_endpoint) + == custom_endpoint + ) + def test__read_environment_variables(): assert IAMCredentialsClient._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert IAMCredentialsClient._read_environment_variables() == (True, 
"auto", None) + assert IAMCredentialsClient._read_environment_variables() == ( + True, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert IAMCredentialsClient._read_environment_variables() == (False, "auto", None) + assert IAMCredentialsClient._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} @@ -136,27 +176,46 @@ def test__read_environment_variables(): ) else: assert IAMCredentialsClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert IAMCredentialsClient._read_environment_variables() == ( False, - "auto", + "never", None, ) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert IAMCredentialsClient._read_environment_variables() == (False, "never", None) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert IAMCredentialsClient._read_environment_variables() == (False, "always", None) + assert IAMCredentialsClient._read_environment_variables() == ( + False, + "always", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert IAMCredentialsClient._read_environment_variables() == (False, "auto", None) + assert IAMCredentialsClient._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: IAMCredentialsClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": 
"foo.com"}): - assert IAMCredentialsClient._read_environment_variables() == (False, "auto", "foo.com") + assert IAMCredentialsClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) def test_use_client_cert_effective(): @@ -165,7 +224,9 @@ def test_use_client_cert_effective(): # the google-auth library supports automatic mTLS and determines that a # client certificate should be used. if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=True): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=True + ): assert IAMCredentialsClient._use_client_cert_effective() is True # Test case 2: Test when `should_use_client_cert` returns False. @@ -173,7 +234,9 @@ def test_use_client_cert_effective(): # the google-auth library supports automatic mTLS and determines that a # client certificate should NOT be used. if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=False): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=False + ): assert IAMCredentialsClient._use_client_cert_effective() is False # Test case 3: Test when `should_use_client_cert` is unavailable and the @@ -185,7 +248,9 @@ def test_use_client_cert_effective(): # Test case 4: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". 
if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"} + ): assert IAMCredentialsClient._use_client_cert_effective() is False # Test case 5: Test when `should_use_client_cert` is unavailable and the @@ -197,7 +262,9 @@ def test_use_client_cert_effective(): # Test case 6: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"} + ): assert IAMCredentialsClient._use_client_cert_effective() is False # Test case 7: Test when `should_use_client_cert` is unavailable and the @@ -209,7 +276,9 @@ def test_use_client_cert_effective(): # Test case 8: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"} + ): assert IAMCredentialsClient._use_client_cert_effective() is False # Test case 9: Test when `should_use_client_cert` is unavailable and the @@ -224,83 +293,167 @@ def test_use_client_cert_effective(): # The method should raise a ValueError as the environment variable must be either # "true" or "false". 
if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): with pytest.raises(ValueError): IAMCredentialsClient._use_client_cert_effective() # Test case 11: Test when `should_use_client_cert` is available and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. # The method should return False as the environment variable is set to an invalid value. - if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): assert IAMCredentialsClient._use_client_cert_effective() is False # Test case 12: Test when `should_use_client_cert` is available and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also, # the GOOGLE_API_CONFIG environment variable is unset. 
- if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): assert IAMCredentialsClient._use_client_cert_effective() is False + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() assert IAMCredentialsClient._get_client_cert_source(None, False) is None - assert IAMCredentialsClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert IAMCredentialsClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + assert ( + IAMCredentialsClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + IAMCredentialsClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert IAMCredentialsClient._get_client_cert_source(None, True) is mock_default_cert_source - assert IAMCredentialsClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + IAMCredentialsClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + IAMCredentialsClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) -@mock.patch.object(IAMCredentialsClient, "_DEFAULT_ENDPOINT_TEMPLATE", 
modify_default_endpoint_template(IAMCredentialsClient)) -@mock.patch.object(IAMCredentialsAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(IAMCredentialsAsyncClient)) + +@mock.patch.object( + IAMCredentialsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(IAMCredentialsClient), +) +@mock.patch.object( + IAMCredentialsAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(IAMCredentialsAsyncClient), +) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() default_universe = IAMCredentialsClient._DEFAULT_UNIVERSE - default_endpoint = IAMCredentialsClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = IAMCredentialsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = IAMCredentialsClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = IAMCredentialsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) - assert IAMCredentialsClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert IAMCredentialsClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == IAMCredentialsClient.DEFAULT_MTLS_ENDPOINT - assert IAMCredentialsClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert IAMCredentialsClient._get_api_endpoint(None, None, default_universe, "always") == IAMCredentialsClient.DEFAULT_MTLS_ENDPOINT - assert IAMCredentialsClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == IAMCredentialsClient.DEFAULT_MTLS_ENDPOINT - assert IAMCredentialsClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert IAMCredentialsClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + assert ( + 
IAMCredentialsClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + IAMCredentialsClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == IAMCredentialsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + IAMCredentialsClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + IAMCredentialsClient._get_api_endpoint(None, None, default_universe, "always") + == IAMCredentialsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + IAMCredentialsClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == IAMCredentialsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + IAMCredentialsClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + IAMCredentialsClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) with pytest.raises(MutualTLSChannelError) as excinfo: - IAMCredentialsClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + IAMCredentialsClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) def test__get_universe_domain(): client_universe_domain = "foo.com" universe_domain_env = "bar.com" - assert IAMCredentialsClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert IAMCredentialsClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert IAMCredentialsClient._get_universe_domain(None, None) == IAMCredentialsClient._DEFAULT_UNIVERSE + assert ( + IAMCredentialsClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + IAMCredentialsClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + IAMCredentialsClient._get_universe_domain(None, None) + == IAMCredentialsClient._DEFAULT_UNIVERSE + ) with pytest.raises(ValueError) as excinfo: IAMCredentialsClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): cred = mock.Mock(["get_cred_info"]) cred.get_cred_info = mock.Mock(return_value=cred_info_json) @@ -316,7 +469,8 @@ def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_in else: assert error.details == ["foo"] -@pytest.mark.parametrize("error_code", [401,403,404,500]) + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) def 
test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): cred = mock.Mock([]) assert not hasattr(cred, "get_cred_info") @@ -329,14 +483,20 @@ def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): client._add_cred_info_for_auth_errors(error) assert error.details == [] -@pytest.mark.parametrize("client_class,transport_name", [ - (IAMCredentialsClient, "grpc"), - (IAMCredentialsAsyncClient, "grpc_asyncio"), - (IAMCredentialsClient, "rest"), -]) + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (IAMCredentialsClient, "grpc"), + (IAMCredentialsAsyncClient, "grpc_asyncio"), + (IAMCredentialsClient, "rest"), + ], +) def test_iam_credentials_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: factory.return_value = creds info = {"valid": True} client = client_class.from_service_account_info(info, transport=transport_name) @@ -344,52 +504,68 @@ def test_iam_credentials_client_from_service_account_info(client_class, transpor assert isinstance(client, client_class) assert client.transport._host == ( - 'iamcredentials.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://iamcredentials.googleapis.com' + "iamcredentials.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://iamcredentials.googleapis.com" ) -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.IAMCredentialsGrpcTransport, "grpc"), - (transports.IAMCredentialsGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.IAMCredentialsRestTransport, "rest"), -]) -def test_iam_credentials_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', 
create=True) as use_jwt: +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.IAMCredentialsGrpcTransport, "grpc"), + (transports.IAMCredentialsGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.IAMCredentialsRestTransport, "rest"), + ], +) +def test_iam_credentials_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=False) use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class,transport_name", [ - (IAMCredentialsClient, "grpc"), - (IAMCredentialsAsyncClient, "grpc_asyncio"), - (IAMCredentialsClient, "rest"), -]) +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (IAMCredentialsClient, "grpc"), + (IAMCredentialsAsyncClient, "grpc_asyncio"), + (IAMCredentialsClient, "rest"), + ], +) def test_iam_credentials_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert 
client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) assert client.transport._host == ( - 'iamcredentials.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://iamcredentials.googleapis.com' + "iamcredentials.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://iamcredentials.googleapis.com" ) @@ -405,30 +581,45 @@ def test_iam_credentials_client_get_transport_class(): assert transport == transports.IAMCredentialsGrpcTransport -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (IAMCredentialsClient, transports.IAMCredentialsGrpcTransport, "grpc"), - (IAMCredentialsAsyncClient, transports.IAMCredentialsGrpcAsyncIOTransport, "grpc_asyncio"), - (IAMCredentialsClient, transports.IAMCredentialsRestTransport, "rest"), -]) -@mock.patch.object(IAMCredentialsClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(IAMCredentialsClient)) -@mock.patch.object(IAMCredentialsAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(IAMCredentialsAsyncClient)) -def test_iam_credentials_client_client_options(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (IAMCredentialsClient, transports.IAMCredentialsGrpcTransport, "grpc"), + ( + IAMCredentialsAsyncClient, + transports.IAMCredentialsGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (IAMCredentialsClient, transports.IAMCredentialsRestTransport, "rest"), + ], +) +@mock.patch.object( + IAMCredentialsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(IAMCredentialsClient), +) +@mock.patch.object( + 
IAMCredentialsAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(IAMCredentialsAsyncClient), +) +def test_iam_credentials_client_client_options( + client_class, transport_class, transport_name +): # Check that if channel is provided we won't create a new one. - with mock.patch.object(IAMCredentialsClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) + with mock.patch.object(IAMCredentialsClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. - with mock.patch.object(IAMCredentialsClient, 'get_transport_class') as gtc: + with mock.patch.object(IAMCredentialsClient, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() # Check the case api_endpoint is provided. options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( @@ -446,13 +637,15 @@ def test_iam_credentials_client_client_options(client_class, transport_class, tr # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -464,7 +657,7 @@ def test_iam_credentials_client_client_options(client_class, transport_class, tr # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( @@ -484,17 +677,22 @@ def test_iam_credentials_client_client_options(client_class, transport_class, tr with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, 
transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -503,48 +701,82 @@ def test_iam_credentials_client_client_options(client_class, transport_class, tr api_audience=None, ) # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (IAMCredentialsClient, transports.IAMCredentialsGrpcTransport, "grpc", "true"), - (IAMCredentialsAsyncClient, transports.IAMCredentialsGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (IAMCredentialsClient, transports.IAMCredentialsGrpcTransport, "grpc", "false"), - (IAMCredentialsAsyncClient, transports.IAMCredentialsGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (IAMCredentialsClient, transports.IAMCredentialsRestTransport, "rest", "true"), - (IAMCredentialsClient, 
transports.IAMCredentialsRestTransport, "rest", "false"), -]) -@mock.patch.object(IAMCredentialsClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(IAMCredentialsClient)) -@mock.patch.object(IAMCredentialsAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(IAMCredentialsAsyncClient)) + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (IAMCredentialsClient, transports.IAMCredentialsGrpcTransport, "grpc", "true"), + ( + IAMCredentialsAsyncClient, + transports.IAMCredentialsGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (IAMCredentialsClient, transports.IAMCredentialsGrpcTransport, "grpc", "false"), + ( + IAMCredentialsAsyncClient, + transports.IAMCredentialsGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (IAMCredentialsClient, transports.IAMCredentialsRestTransport, "rest", "true"), + (IAMCredentialsClient, transports.IAMCredentialsRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + IAMCredentialsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(IAMCredentialsClient), +) +@mock.patch.object( + IAMCredentialsAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(IAMCredentialsAsyncClient), +) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_iam_credentials_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): +def test_iam_credentials_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. # Check the case client_cert_source is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -563,12 +795,22 @@ def test_iam_credentials_client_mtls_env_auto(client_class, transport_class, tra # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -589,15 +831,22 @@ def test_iam_credentials_client_mtls_env_auto(client_class, transport_class, tra ) # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -607,19 +856,31 @@ def test_iam_credentials_client_mtls_env_auto(client_class, transport_class, tra ) -@pytest.mark.parametrize("client_class", [ - IAMCredentialsClient, IAMCredentialsAsyncClient -]) -@mock.patch.object(IAMCredentialsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IAMCredentialsClient)) -@mock.patch.object(IAMCredentialsAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IAMCredentialsAsyncClient)) +@pytest.mark.parametrize( + "client_class", [IAMCredentialsClient, IAMCredentialsAsyncClient] +) +@mock.patch.object( + IAMCredentialsClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(IAMCredentialsClient), +) +@mock.patch.object( + IAMCredentialsAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(IAMCredentialsAsyncClient), +) def test_iam_credentials_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source == mock_client_cert_source @@ -627,18 +888,25 @@ def test_iam_credentials_client_get_mtls_endpoint_and_cert_source(client_class): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source is None # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): if hasattr(google.auth.transport.mtls, "should_use_client_cert"): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, ) api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( options @@ -675,23 +943,23 @@ def test_iam_credentials_client_get_mtls_endpoint_and_cert_source(client_class): env = os.environ.copy() env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) with mock.patch.dict(os.environ, env, clear=True): - config_filename = "mock_certificate_config.json" - config_file_content = json.dumps(config_data) - m = mock.mock_open(read_data=config_file_content) - with mock.patch("builtins.open", m): - with mock.patch.dict( - os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} - ): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, - api_endpoint=mock_api_endpoint, - ) - api_endpoint, cert_source = ( - client_class.get_mtls_endpoint_and_cert_source(options) - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is expected_cert_source + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert 
api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). test_cases = [ @@ -722,23 +990,23 @@ def test_iam_credentials_client_get_mtls_endpoint_and_cert_source(client_class): env = os.environ.copy() env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") with mock.patch.dict(os.environ, env, clear=True): - config_filename = "mock_certificate_config.json" - config_file_content = json.dumps(config_data) - m = mock.mock_open(read_data=config_file_content) - with mock.patch("builtins.open", m): - with mock.patch.dict( - os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} - ): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, - api_endpoint=mock_api_endpoint, - ) - api_endpoint, cert_source = ( - client_class.get_mtls_endpoint_and_cert_source(options) - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is expected_cert_source + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): @@ -754,16 +1022,27 @@ def test_iam_credentials_client_get_mtls_endpoint_and_cert_source(client_class): # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_ENDPOINT assert cert_source is None # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source() + ) assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source @@ -773,27 +1052,50 @@ def test_iam_credentials_client_get_mtls_endpoint_and_cert_source(client_class): with pytest.raises(MutualTLSChannelError) as excinfo: client_class.get_mtls_endpoint_and_cert_source() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + -@pytest.mark.parametrize("client_class", [ - IAMCredentialsClient, IAMCredentialsAsyncClient -]) -@mock.patch.object(IAMCredentialsClient, "_DEFAULT_ENDPOINT_TEMPLATE", 
modify_default_endpoint_template(IAMCredentialsClient)) -@mock.patch.object(IAMCredentialsAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(IAMCredentialsAsyncClient)) +@pytest.mark.parametrize( + "client_class", [IAMCredentialsClient, IAMCredentialsAsyncClient] +) +@mock.patch.object( + IAMCredentialsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(IAMCredentialsClient), +) +@mock.patch.object( + IAMCredentialsAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(IAMCredentialsAsyncClient), +) def test_iam_credentials_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" default_universe = IAMCredentialsClient._DEFAULT_UNIVERSE - default_endpoint = IAMCredentialsClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = IAMCredentialsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = IAMCredentialsClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = IAMCredentialsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", @@ -816,11 +1118,19 @@ def test_iam_credentials_client_client_api_endpoint(client_class): universe_exists = hasattr(options, "universe_domain") if universe_exists: options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the 
_DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. @@ -828,27 +1138,40 @@ def test_iam_credentials_client_client_api_endpoint(client_class): if hasattr(options, "universe_domain"): delattr(options, "universe_domain") with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) assert client.api_endpoint == default_endpoint -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (IAMCredentialsClient, transports.IAMCredentialsGrpcTransport, "grpc"), - (IAMCredentialsAsyncClient, transports.IAMCredentialsGrpcAsyncIOTransport, "grpc_asyncio"), - (IAMCredentialsClient, transports.IAMCredentialsRestTransport, "rest"), -]) -def test_iam_credentials_client_client_options_scopes(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (IAMCredentialsClient, transports.IAMCredentialsGrpcTransport, "grpc"), + ( + IAMCredentialsAsyncClient, + transports.IAMCredentialsGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (IAMCredentialsClient, transports.IAMCredentialsRestTransport, "rest"), + ], +) +def test_iam_credentials_client_client_options_scopes( + client_class, transport_class, transport_name +): # Check the case scopes are provided. 
options = client_options.ClientOptions( scopes=["1", "2"], ) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -857,24 +1180,40 @@ def test_iam_credentials_client_client_options_scopes(client_class, transport_cl api_audience=None, ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (IAMCredentialsClient, transports.IAMCredentialsGrpcTransport, "grpc", grpc_helpers), - (IAMCredentialsAsyncClient, transports.IAMCredentialsGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (IAMCredentialsClient, transports.IAMCredentialsRestTransport, "rest", None), -]) -def test_iam_credentials_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + IAMCredentialsClient, + transports.IAMCredentialsGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + IAMCredentialsAsyncClient, + transports.IAMCredentialsGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (IAMCredentialsClient, transports.IAMCredentialsRestTransport, "rest", None), + ], +) +def test_iam_credentials_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -883,11 +1222,14 @@ def test_iam_credentials_client_client_options_credentials_file(client_class, tr api_audience=None, ) + def test_iam_credentials_client_client_options_from_dict(): - with mock.patch('google.iam.credentials_v1.services.iam_credentials.transports.IAMCredentialsGrpcTransport.__init__') as grpc_transport: + with mock.patch( + "google.iam.credentials_v1.services.iam_credentials.transports.IAMCredentialsGrpcTransport.__init__" + ) as grpc_transport: grpc_transport.return_value = None client = IAMCredentialsClient( - client_options={'api_endpoint': 'squid.clam.whelk'} + client_options={"api_endpoint": "squid.clam.whelk"} ) grpc_transport.assert_called_once_with( credentials=None, @@ -902,23 +1244,38 @@ def test_iam_credentials_client_client_options_from_dict(): ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (IAMCredentialsClient, transports.IAMCredentialsGrpcTransport, "grpc", grpc_helpers), - (IAMCredentialsAsyncClient, transports.IAMCredentialsGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_iam_credentials_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): +@pytest.mark.parametrize( + 
"client_class,transport_class,transport_name,grpc_helpers", + [ + ( + IAMCredentialsClient, + transports.IAMCredentialsGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + IAMCredentialsAsyncClient, + transports.IAMCredentialsGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_iam_credentials_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -928,13 +1285,13 @@ def test_iam_credentials_client_create_channel_credentials_file(client_class, tr ) # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch.object(grpc_helpers, "create_channel") as create_channel, + ): creds = ga_credentials.AnonymousCredentials() file_creds = ga_credentials.AnonymousCredentials() load_creds.return_value = (file_creds, None) @@ -945,9 +1302,7 @@ def test_iam_credentials_client_create_channel_credentials_file(client_class, tr credentials=file_creds, credentials_file=None, quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), scopes=None, default_host="iamcredentials.googleapis.com", ssl_credentials=None, @@ -958,11 +1313,14 @@ def test_iam_credentials_client_create_channel_credentials_file(client_class, tr ) -@pytest.mark.parametrize("request_type", [ - common.GenerateAccessTokenRequest, - dict, -]) -def test_generate_access_token(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + common.GenerateAccessTokenRequest, + dict, + ], +) +def test_generate_access_token(request_type, transport: str = "grpc"): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -974,11 +1332,11 @@ def test_generate_access_token(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_access_token), - '__call__') as call: + type(client.transport.generate_access_token), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = common.GenerateAccessTokenResponse( - access_token='access_token_value', + access_token="access_token_value", ) response = client.generate_access_token(request) @@ -990,7 +1348,7 @@ def test_generate_access_token(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, common.GenerateAccessTokenResponse) - assert response.access_token == 'access_token_value' + assert response.access_token == "access_token_value" def test_generate_access_token_non_empty_request_with_auto_populated_field(): @@ -998,28 +1356,31 @@ def test_generate_access_token_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = common.GenerateAccessTokenRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_access_token), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.generate_access_token), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.generate_access_token(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == common.GenerateAccessTokenRequest( - name='name_value', + name="name_value", ) + def test_generate_access_token_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1034,12 +1395,19 @@ def test_generate_access_token_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.generate_access_token in client._transport._wrapped_methods + assert ( + client._transport.generate_access_token + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.generate_access_token] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.generate_access_token] = ( + mock_rpc + ) request = {} client.generate_access_token(request) @@ -1052,8 +1420,11 @@ def test_generate_access_token_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_generate_access_token_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_generate_access_token_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1067,12 +1438,17 @@ async def test_generate_access_token_async_use_cached_wrapped_rpc(transport: str wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.generate_access_token in client._client._transport._wrapped_methods + assert ( + client._client._transport.generate_access_token + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.generate_access_token] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.generate_access_token + ] = mock_rpc request = {} await client.generate_access_token(request) @@ -1086,8 +1462,11 @@ async def test_generate_access_token_async_use_cached_wrapped_rpc(transport: str assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_generate_access_token_async(transport: str = 'grpc_asyncio', request_type=common.GenerateAccessTokenRequest): +async def test_generate_access_token_async( + transport: str = "grpc_asyncio", request_type=common.GenerateAccessTokenRequest +): client = IAMCredentialsAsyncClient( 
credentials=async_anonymous_credentials(), transport=transport, @@ -1099,12 +1478,14 @@ async def test_generate_access_token_async(transport: str = 'grpc_asyncio', requ # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_access_token), - '__call__') as call: + type(client.transport.generate_access_token), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(common.GenerateAccessTokenResponse( - access_token='access_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + common.GenerateAccessTokenResponse( + access_token="access_token_value", + ) + ) response = await client.generate_access_token(request) # Establish that the underlying gRPC stub method was called. @@ -1115,13 +1496,14 @@ async def test_generate_access_token_async(transport: str = 'grpc_asyncio', requ # Establish that the response is the type that we expect. assert isinstance(response, common.GenerateAccessTokenResponse) - assert response.access_token == 'access_token_value' + assert response.access_token == "access_token_value" @pytest.mark.asyncio async def test_generate_access_token_async_from_dict(): await test_generate_access_token_async(request_type=dict) + def test_generate_access_token_field_headers(): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1131,12 +1513,12 @@ def test_generate_access_token_field_headers(): # a field header. Set these to a non-empty value. request = common.GenerateAccessTokenRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.generate_access_token), - '__call__') as call: + type(client.transport.generate_access_token), "__call__" + ) as call: call.return_value = common.GenerateAccessTokenResponse() client.generate_access_token(request) @@ -1148,9 +1530,9 @@ def test_generate_access_token_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1163,13 +1545,15 @@ async def test_generate_access_token_field_headers_async(): # a field header. Set these to a non-empty value. request = common.GenerateAccessTokenRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_access_token), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.GenerateAccessTokenResponse()) + type(client.transport.generate_access_token), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + common.GenerateAccessTokenResponse() + ) await client.generate_access_token(request) # Establish that the underlying gRPC stub method was called. @@ -1180,9 +1564,9 @@ async def test_generate_access_token_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_generate_access_token_flattened(): @@ -1192,16 +1576,16 @@ def test_generate_access_token_flattened(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.generate_access_token), - '__call__') as call: + type(client.transport.generate_access_token), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = common.GenerateAccessTokenResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.generate_access_token( - name='name_value', - delegates=['delegates_value'], - scope=['scope_value'], + name="name_value", + delegates=["delegates_value"], + scope=["scope_value"], lifetime=duration_pb2.Duration(seconds=751), ) @@ -1210,15 +1594,17 @@ def test_generate_access_token_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].delegates - mock_val = ['delegates_value'] + mock_val = ["delegates_value"] assert arg == mock_val arg = args[0].scope - mock_val = ['scope_value'] + mock_val = ["scope_value"] assert arg == mock_val - assert DurationRule().to_proto(args[0].lifetime) == duration_pb2.Duration(seconds=751) + assert DurationRule().to_proto(args[0].lifetime) == duration_pb2.Duration( + seconds=751 + ) def test_generate_access_token_flattened_error(): @@ -1231,12 +1617,13 @@ def test_generate_access_token_flattened_error(): with pytest.raises(ValueError): client.generate_access_token( common.GenerateAccessTokenRequest(), - name='name_value', - delegates=['delegates_value'], - scope=['scope_value'], + name="name_value", + delegates=["delegates_value"], + scope=["scope_value"], lifetime=duration_pb2.Duration(seconds=751), ) + @pytest.mark.asyncio async def test_generate_access_token_flattened_async(): client = IAMCredentialsAsyncClient( @@ -1245,18 +1632,20 @@ async def test_generate_access_token_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.generate_access_token), - '__call__') as call: + type(client.transport.generate_access_token), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = common.GenerateAccessTokenResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.GenerateAccessTokenResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + common.GenerateAccessTokenResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.generate_access_token( - name='name_value', - delegates=['delegates_value'], - scope=['scope_value'], + name="name_value", + delegates=["delegates_value"], + scope=["scope_value"], lifetime=duration_pb2.Duration(seconds=751), ) @@ -1265,15 +1654,18 @@ async def test_generate_access_token_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].delegates - mock_val = ['delegates_value'] + mock_val = ["delegates_value"] assert arg == mock_val arg = args[0].scope - mock_val = ['scope_value'] + mock_val = ["scope_value"] assert arg == mock_val - assert DurationRule().to_proto(args[0].lifetime) == duration_pb2.Duration(seconds=751) + assert DurationRule().to_proto(args[0].lifetime) == duration_pb2.Duration( + seconds=751 + ) + @pytest.mark.asyncio async def test_generate_access_token_flattened_error_async(): @@ -1286,18 +1678,21 @@ async def test_generate_access_token_flattened_error_async(): with pytest.raises(ValueError): await client.generate_access_token( common.GenerateAccessTokenRequest(), - name='name_value', - delegates=['delegates_value'], - scope=['scope_value'], + name="name_value", + delegates=["delegates_value"], + scope=["scope_value"], lifetime=duration_pb2.Duration(seconds=751), ) -@pytest.mark.parametrize("request_type", [ - 
common.GenerateIdTokenRequest, - dict, -]) -def test_generate_id_token(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + common.GenerateIdTokenRequest, + dict, + ], +) +def test_generate_id_token(request_type, transport: str = "grpc"): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1309,11 +1704,11 @@ def test_generate_id_token(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_id_token), - '__call__') as call: + type(client.transport.generate_id_token), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = common.GenerateIdTokenResponse( - token='token_value', + token="token_value", ) response = client.generate_id_token(request) @@ -1325,7 +1720,7 @@ def test_generate_id_token(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, common.GenerateIdTokenResponse) - assert response.token == 'token_value' + assert response.token == "token_value" def test_generate_id_token_non_empty_request_with_auto_populated_field(): @@ -1333,30 +1728,33 @@ def test_generate_id_token_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = common.GenerateIdTokenRequest( - name='name_value', - audience='audience_value', + name="name_value", + audience="audience_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.generate_id_token), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.generate_id_token), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.generate_id_token(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == common.GenerateIdTokenRequest( - name='name_value', - audience='audience_value', + name="name_value", + audience="audience_value", ) + def test_generate_id_token_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1375,8 +1773,12 @@ def test_generate_id_token_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.generate_id_token] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.generate_id_token] = ( + mock_rpc + ) request = {} client.generate_id_token(request) @@ -1389,8 +1791,11 @@ def test_generate_id_token_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_generate_id_token_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_generate_id_token_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1404,12 +1809,17 @@ async def test_generate_id_token_async_use_cached_wrapped_rpc(transport: str = " wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.generate_id_token in client._client._transport._wrapped_methods + assert ( + client._client._transport.generate_id_token + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.generate_id_token] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.generate_id_token + ] = mock_rpc request = {} await client.generate_id_token(request) @@ -1423,8 +1833,11 @@ async def test_generate_id_token_async_use_cached_wrapped_rpc(transport: str = " assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_generate_id_token_async(transport: str = 'grpc_asyncio', request_type=common.GenerateIdTokenRequest): +async def test_generate_id_token_async( + transport: str = "grpc_asyncio", request_type=common.GenerateIdTokenRequest +): client = IAMCredentialsAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -1436,12 
+1849,14 @@ async def test_generate_id_token_async(transport: str = 'grpc_asyncio', request_ # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_id_token), - '__call__') as call: + type(client.transport.generate_id_token), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(common.GenerateIdTokenResponse( - token='token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + common.GenerateIdTokenResponse( + token="token_value", + ) + ) response = await client.generate_id_token(request) # Establish that the underlying gRPC stub method was called. @@ -1452,13 +1867,14 @@ async def test_generate_id_token_async(transport: str = 'grpc_asyncio', request_ # Establish that the response is the type that we expect. assert isinstance(response, common.GenerateIdTokenResponse) - assert response.token == 'token_value' + assert response.token == "token_value" @pytest.mark.asyncio async def test_generate_id_token_async_from_dict(): await test_generate_id_token_async(request_type=dict) + def test_generate_id_token_field_headers(): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1468,12 +1884,12 @@ def test_generate_id_token_field_headers(): # a field header. Set these to a non-empty value. request = common.GenerateIdTokenRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_id_token), - '__call__') as call: + type(client.transport.generate_id_token), "__call__" + ) as call: call.return_value = common.GenerateIdTokenResponse() client.generate_id_token(request) @@ -1485,9 +1901,9 @@ def test_generate_id_token_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1500,13 +1916,15 @@ async def test_generate_id_token_field_headers_async(): # a field header. Set these to a non-empty value. request = common.GenerateIdTokenRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_id_token), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.GenerateIdTokenResponse()) + type(client.transport.generate_id_token), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + common.GenerateIdTokenResponse() + ) await client.generate_id_token(request) # Establish that the underlying gRPC stub method was called. @@ -1517,9 +1935,9 @@ async def test_generate_id_token_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_generate_id_token_flattened(): @@ -1529,16 +1947,16 @@ def test_generate_id_token_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_id_token), - '__call__') as call: + type(client.transport.generate_id_token), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = common.GenerateIdTokenResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.generate_id_token( - name='name_value', - delegates=['delegates_value'], - audience='audience_value', + name="name_value", + delegates=["delegates_value"], + audience="audience_value", include_email=True, ) @@ -1547,13 +1965,13 @@ def test_generate_id_token_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].delegates - mock_val = ['delegates_value'] + mock_val = ["delegates_value"] assert arg == mock_val arg = args[0].audience - mock_val = 'audience_value' + mock_val = "audience_value" assert arg == mock_val arg = args[0].include_email mock_val = True @@ -1570,12 +1988,13 @@ def test_generate_id_token_flattened_error(): with pytest.raises(ValueError): client.generate_id_token( common.GenerateIdTokenRequest(), - name='name_value', - delegates=['delegates_value'], - audience='audience_value', + name="name_value", + delegates=["delegates_value"], + audience="audience_value", include_email=True, ) + @pytest.mark.asyncio async def test_generate_id_token_flattened_async(): client = IAMCredentialsAsyncClient( @@ -1584,18 +2003,20 @@ async def test_generate_id_token_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.generate_id_token), - '__call__') as call: + type(client.transport.generate_id_token), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = common.GenerateIdTokenResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.GenerateIdTokenResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + common.GenerateIdTokenResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.generate_id_token( - name='name_value', - delegates=['delegates_value'], - audience='audience_value', + name="name_value", + delegates=["delegates_value"], + audience="audience_value", include_email=True, ) @@ -1604,18 +2025,19 @@ async def test_generate_id_token_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].delegates - mock_val = ['delegates_value'] + mock_val = ["delegates_value"] assert arg == mock_val arg = args[0].audience - mock_val = 'audience_value' + mock_val = "audience_value" assert arg == mock_val arg = args[0].include_email mock_val = True assert arg == mock_val + @pytest.mark.asyncio async def test_generate_id_token_flattened_error_async(): client = IAMCredentialsAsyncClient( @@ -1627,18 +2049,21 @@ async def test_generate_id_token_flattened_error_async(): with pytest.raises(ValueError): await client.generate_id_token( common.GenerateIdTokenRequest(), - name='name_value', - delegates=['delegates_value'], - audience='audience_value', + name="name_value", + delegates=["delegates_value"], + audience="audience_value", include_email=True, ) -@pytest.mark.parametrize("request_type", [ - common.SignBlobRequest, - dict, -]) -def test_sign_blob(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + common.SignBlobRequest, + dict, + ], +) +def test_sign_blob(request_type, transport: str = "grpc"): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1649,13 +2074,11 @@ def test_sign_blob(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.sign_blob), - '__call__') as call: + with mock.patch.object(type(client.transport.sign_blob), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = common.SignBlobResponse( - key_id='key_id_value', - signed_blob=b'signed_blob_blob', + key_id="key_id_value", + signed_blob=b"signed_blob_blob", ) response = client.sign_blob(request) @@ -1667,8 +2090,8 @@ def test_sign_blob(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, common.SignBlobResponse) - assert response.key_id == 'key_id_value' - assert response.signed_blob == b'signed_blob_blob' + assert response.key_id == "key_id_value" + assert response.signed_blob == b"signed_blob_blob" def test_sign_blob_non_empty_request_with_auto_populated_field(): @@ -1676,28 +2099,29 @@ def test_sign_blob_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = common.SignBlobRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.sign_blob), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.sign_blob), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.sign_blob(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == common.SignBlobRequest( - name='name_value', + name="name_value", ) + def test_sign_blob_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1716,7 +2140,9 @@ def test_sign_blob_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.sign_blob] = mock_rpc request = {} client.sign_blob(request) @@ -1730,6 +2156,7 @@ def test_sign_blob_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_sign_blob_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1745,12 +2172,17 @@ async def test_sign_blob_async_use_cached_wrapped_rpc(transport: str = "grpc_asy wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.sign_blob in client._client._transport._wrapped_methods + assert ( + client._client._transport.sign_blob + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.sign_blob] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.sign_blob + ] = mock_rpc request = {} await client.sign_blob(request) @@ -1764,8 +2196,11 @@ async def test_sign_blob_async_use_cached_wrapped_rpc(transport: str = "grpc_asy assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + 
@pytest.mark.asyncio -async def test_sign_blob_async(transport: str = 'grpc_asyncio', request_type=common.SignBlobRequest): +async def test_sign_blob_async( + transport: str = "grpc_asyncio", request_type=common.SignBlobRequest +): client = IAMCredentialsAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -1776,14 +2211,14 @@ async def test_sign_blob_async(transport: str = 'grpc_asyncio', request_type=com request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.sign_blob), - '__call__') as call: + with mock.patch.object(type(client.transport.sign_blob), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(common.SignBlobResponse( - key_id='key_id_value', - signed_blob=b'signed_blob_blob', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + common.SignBlobResponse( + key_id="key_id_value", + signed_blob=b"signed_blob_blob", + ) + ) response = await client.sign_blob(request) # Establish that the underlying gRPC stub method was called. @@ -1794,14 +2229,15 @@ async def test_sign_blob_async(transport: str = 'grpc_asyncio', request_type=com # Establish that the response is the type that we expect. assert isinstance(response, common.SignBlobResponse) - assert response.key_id == 'key_id_value' - assert response.signed_blob == b'signed_blob_blob' + assert response.key_id == "key_id_value" + assert response.signed_blob == b"signed_blob_blob" @pytest.mark.asyncio async def test_sign_blob_async_from_dict(): await test_sign_blob_async(request_type=dict) + def test_sign_blob_field_headers(): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1811,12 +2247,10 @@ def test_sign_blob_field_headers(): # a field header. Set these to a non-empty value. 
request = common.SignBlobRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.sign_blob), - '__call__') as call: + with mock.patch.object(type(client.transport.sign_blob), "__call__") as call: call.return_value = common.SignBlobResponse() client.sign_blob(request) @@ -1828,9 +2262,9 @@ def test_sign_blob_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1843,13 +2277,13 @@ async def test_sign_blob_field_headers_async(): # a field header. Set these to a non-empty value. request = common.SignBlobRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.sign_blob), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.SignBlobResponse()) + with mock.patch.object(type(client.transport.sign_blob), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + common.SignBlobResponse() + ) await client.sign_blob(request) # Establish that the underlying gRPC stub method was called. @@ -1860,9 +2294,9 @@ async def test_sign_blob_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_sign_blob_flattened(): @@ -1871,17 +2305,15 @@ def test_sign_blob_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.sign_blob), - '__call__') as call: + with mock.patch.object(type(client.transport.sign_blob), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = common.SignBlobResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.sign_blob( - name='name_value', - delegates=['delegates_value'], - payload=b'payload_blob', + name="name_value", + delegates=["delegates_value"], + payload=b"payload_blob", ) # Establish that the underlying call was made with the expected @@ -1889,13 +2321,13 @@ def test_sign_blob_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].delegates - mock_val = ['delegates_value'] + mock_val = ["delegates_value"] assert arg == mock_val arg = args[0].payload - mock_val = b'payload_blob' + mock_val = b"payload_blob" assert arg == mock_val @@ -1909,11 +2341,12 @@ def test_sign_blob_flattened_error(): with pytest.raises(ValueError): client.sign_blob( common.SignBlobRequest(), - name='name_value', - delegates=['delegates_value'], - payload=b'payload_blob', + name="name_value", + delegates=["delegates_value"], + payload=b"payload_blob", ) + @pytest.mark.asyncio async def test_sign_blob_flattened_async(): client = IAMCredentialsAsyncClient( @@ -1921,19 +2354,19 @@ async def test_sign_blob_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.sign_blob), - '__call__') as call: + with mock.patch.object(type(client.transport.sign_blob), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = common.SignBlobResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.SignBlobResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + common.SignBlobResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.sign_blob( - name='name_value', - delegates=['delegates_value'], - payload=b'payload_blob', + name="name_value", + delegates=["delegates_value"], + payload=b"payload_blob", ) # Establish that the underlying call was made with the expected @@ -1941,15 +2374,16 @@ async def test_sign_blob_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].delegates - mock_val = ['delegates_value'] + mock_val = ["delegates_value"] assert arg == mock_val arg = args[0].payload - mock_val = b'payload_blob' + mock_val = b"payload_blob" assert arg == mock_val + @pytest.mark.asyncio async def test_sign_blob_flattened_error_async(): client = IAMCredentialsAsyncClient( @@ -1961,17 +2395,20 @@ async def test_sign_blob_flattened_error_async(): with pytest.raises(ValueError): await client.sign_blob( common.SignBlobRequest(), - name='name_value', - delegates=['delegates_value'], - payload=b'payload_blob', + name="name_value", + delegates=["delegates_value"], + payload=b"payload_blob", ) -@pytest.mark.parametrize("request_type", [ - common.SignJwtRequest, - dict, -]) -def test_sign_jwt(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + common.SignJwtRequest, + dict, + ], +) +def test_sign_jwt(request_type, transport: str = "grpc"): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1982,13 +2419,11 @@ def test_sign_jwt(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the 
gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.sign_jwt), - '__call__') as call: + with mock.patch.object(type(client.transport.sign_jwt), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = common.SignJwtResponse( - key_id='key_id_value', - signed_jwt='signed_jwt_value', + key_id="key_id_value", + signed_jwt="signed_jwt_value", ) response = client.sign_jwt(request) @@ -2000,8 +2435,8 @@ def test_sign_jwt(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, common.SignJwtResponse) - assert response.key_id == 'key_id_value' - assert response.signed_jwt == 'signed_jwt_value' + assert response.key_id == "key_id_value" + assert response.signed_jwt == "signed_jwt_value" def test_sign_jwt_non_empty_request_with_auto_populated_field(): @@ -2009,30 +2444,31 @@ def test_sign_jwt_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = common.SignJwtRequest( - name='name_value', - payload='payload_value', + name="name_value", + payload="payload_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.sign_jwt), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.sign_jwt), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.sign_jwt(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == common.SignJwtRequest( - name='name_value', - payload='payload_value', + name="name_value", + payload="payload_value", ) + def test_sign_jwt_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2051,7 +2487,9 @@ def test_sign_jwt_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.sign_jwt] = mock_rpc request = {} client.sign_jwt(request) @@ -2065,6 +2503,7 @@ def test_sign_jwt_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_sign_jwt_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2080,12 +2519,17 @@ async def test_sign_jwt_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.sign_jwt in client._client._transport._wrapped_methods + assert ( + client._client._transport.sign_jwt + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.sign_jwt] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.sign_jwt + ] = mock_rpc request = {} await client.sign_jwt(request) @@ -2099,8 +2543,11 @@ async def test_sign_jwt_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn assert wrapper_fn.call_count 
== 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_sign_jwt_async(transport: str = 'grpc_asyncio', request_type=common.SignJwtRequest): +async def test_sign_jwt_async( + transport: str = "grpc_asyncio", request_type=common.SignJwtRequest +): client = IAMCredentialsAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2111,14 +2558,14 @@ async def test_sign_jwt_async(transport: str = 'grpc_asyncio', request_type=comm request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.sign_jwt), - '__call__') as call: + with mock.patch.object(type(client.transport.sign_jwt), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(common.SignJwtResponse( - key_id='key_id_value', - signed_jwt='signed_jwt_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + common.SignJwtResponse( + key_id="key_id_value", + signed_jwt="signed_jwt_value", + ) + ) response = await client.sign_jwt(request) # Establish that the underlying gRPC stub method was called. @@ -2129,14 +2576,15 @@ async def test_sign_jwt_async(transport: str = 'grpc_asyncio', request_type=comm # Establish that the response is the type that we expect. assert isinstance(response, common.SignJwtResponse) - assert response.key_id == 'key_id_value' - assert response.signed_jwt == 'signed_jwt_value' + assert response.key_id == "key_id_value" + assert response.signed_jwt == "signed_jwt_value" @pytest.mark.asyncio async def test_sign_jwt_async_from_dict(): await test_sign_jwt_async(request_type=dict) + def test_sign_jwt_field_headers(): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2146,12 +2594,10 @@ def test_sign_jwt_field_headers(): # a field header. Set these to a non-empty value. 
request = common.SignJwtRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.sign_jwt), - '__call__') as call: + with mock.patch.object(type(client.transport.sign_jwt), "__call__") as call: call.return_value = common.SignJwtResponse() client.sign_jwt(request) @@ -2163,9 +2609,9 @@ def test_sign_jwt_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2178,13 +2624,13 @@ async def test_sign_jwt_field_headers_async(): # a field header. Set these to a non-empty value. request = common.SignJwtRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.sign_jwt), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.SignJwtResponse()) + with mock.patch.object(type(client.transport.sign_jwt), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + common.SignJwtResponse() + ) await client.sign_jwt(request) # Establish that the underlying gRPC stub method was called. @@ -2195,9 +2641,9 @@ async def test_sign_jwt_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_sign_jwt_flattened(): @@ -2206,17 +2652,15 @@ def test_sign_jwt_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.sign_jwt), - '__call__') as call: + with mock.patch.object(type(client.transport.sign_jwt), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = common.SignJwtResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.sign_jwt( - name='name_value', - delegates=['delegates_value'], - payload='payload_value', + name="name_value", + delegates=["delegates_value"], + payload="payload_value", ) # Establish that the underlying call was made with the expected @@ -2224,13 +2668,13 @@ def test_sign_jwt_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].delegates - mock_val = ['delegates_value'] + mock_val = ["delegates_value"] assert arg == mock_val arg = args[0].payload - mock_val = 'payload_value' + mock_val = "payload_value" assert arg == mock_val @@ -2244,11 +2688,12 @@ def test_sign_jwt_flattened_error(): with pytest.raises(ValueError): client.sign_jwt( common.SignJwtRequest(), - name='name_value', - delegates=['delegates_value'], - payload='payload_value', + name="name_value", + delegates=["delegates_value"], + payload="payload_value", ) + @pytest.mark.asyncio async def test_sign_jwt_flattened_async(): client = IAMCredentialsAsyncClient( @@ -2256,19 +2701,19 @@ async def test_sign_jwt_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.sign_jwt), - '__call__') as call: + with mock.patch.object(type(client.transport.sign_jwt), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = common.SignJwtResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.SignJwtResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + common.SignJwtResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.sign_jwt( - name='name_value', - delegates=['delegates_value'], - payload='payload_value', + name="name_value", + delegates=["delegates_value"], + payload="payload_value", ) # Establish that the underlying call was made with the expected @@ -2276,15 +2721,16 @@ async def test_sign_jwt_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].delegates - mock_val = ['delegates_value'] + mock_val = ["delegates_value"] assert arg == mock_val arg = args[0].payload - mock_val = 'payload_value' + mock_val = "payload_value" assert arg == mock_val + @pytest.mark.asyncio async def test_sign_jwt_flattened_error_async(): client = IAMCredentialsAsyncClient( @@ -2296,9 +2742,9 @@ async def test_sign_jwt_flattened_error_async(): with pytest.raises(ValueError): await client.sign_jwt( common.SignJwtRequest(), - name='name_value', - delegates=['delegates_value'], - payload='payload_value', + name="name_value", + delegates=["delegates_value"], + payload="payload_value", ) @@ -2316,12 +2762,19 @@ def test_generate_access_token_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.generate_access_token in client._transport._wrapped_methods + assert ( + client._transport.generate_access_token + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.generate_access_token] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.generate_access_token] = ( + mock_rpc + ) request = {} client.generate_access_token(request) @@ -2336,7 +2789,9 @@ def test_generate_access_token_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_generate_access_token_rest_required_fields(request_type=common.GenerateAccessTokenRequest): +def test_generate_access_token_rest_required_fields( + request_type=common.GenerateAccessTokenRequest, +): transport_class = transports.IAMCredentialsRestTransport request_init = {} @@ -2344,53 +2799,56 @@ def test_generate_access_token_rest_required_fields(request_type=common.Generate request_init["scope"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).generate_access_token._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_access_token._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' - jsonified_request["scope"] = 'scope_value' + jsonified_request["name"] = "name_value" + jsonified_request["scope"] = "scope_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).generate_access_token._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + 
credentials=ga_credentials.AnonymousCredentials() + ).generate_access_token._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" assert "scope" in jsonified_request - assert jsonified_request["scope"] == 'scope_value' + assert jsonified_request["scope"] == "scope_value" client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = common.GenerateAccessTokenResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -2400,23 +2858,32 @@ def test_generate_access_token_rest_required_fields(request_type=common.Generate return_value = common.GenerateAccessTokenResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.generate_access_token(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_generate_access_token_rest_unset_required_fields(): - transport = transports.IAMCredentialsRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.IAMCredentialsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.generate_access_token._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "scope", ))) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "scope", + ) + ) + ) def test_generate_access_token_rest_flattened(): @@ -2426,18 +2893,18 @@ def test_generate_access_token_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = common.GenerateAccessTokenResponse() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/serviceAccounts/sample2'} + sample_request = {"name": "projects/sample1/serviceAccounts/sample2"} # get truthy value for each flattened field mock_args = dict( - name='name_value', - delegates=['delegates_value'], - scope=['scope_value'], + name="name_value", + delegates=["delegates_value"], + scope=["scope_value"], lifetime=duration_pb2.Duration(seconds=751), ) mock_args.update(sample_request) @@ -2448,7 +2915,7 @@ def test_generate_access_token_rest_flattened(): # Convert return value to protobuf type return_value = common.GenerateAccessTokenResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -2458,10 +2925,14 @@ def test_generate_access_token_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/serviceAccounts/*}:generateAccessToken" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/serviceAccounts/*}:generateAccessToken" + % client.transport._host, + args[1], + ) -def test_generate_access_token_rest_flattened_error(transport: str = 'rest'): +def test_generate_access_token_rest_flattened_error(transport: str = "rest"): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2472,9 +2943,9 @@ def test_generate_access_token_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.generate_access_token( common.GenerateAccessTokenRequest(), - name='name_value', - delegates=['delegates_value'], - scope=['scope_value'], + name="name_value", + delegates=["delegates_value"], + scope=["scope_value"], lifetime=duration_pb2.Duration(seconds=751), ) @@ -2497,8 +2968,12 @@ def test_generate_id_token_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.generate_id_token] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.generate_id_token] = ( + mock_rpc + ) request = {} client.generate_id_token(request) @@ -2513,7 +2988,9 @@ def test_generate_id_token_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_generate_id_token_rest_required_fields(request_type=common.GenerateIdTokenRequest): +def test_generate_id_token_rest_required_fields( + request_type=common.GenerateIdTokenRequest, +): transport_class = transports.IAMCredentialsRestTransport request_init = {} @@ -2521,53 +2998,56 @@ def test_generate_id_token_rest_required_fields(request_type=common.GenerateIdTo request_init["audience"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).generate_id_token._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_id_token._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' - jsonified_request["audience"] = 'audience_value' + jsonified_request["name"] = "name_value" + jsonified_request["audience"] = "audience_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).generate_id_token._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_id_token._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone 
assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" assert "audience" in jsonified_request - assert jsonified_request["audience"] == 'audience_value' + assert jsonified_request["audience"] == "audience_value" client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = common.GenerateIdTokenResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -2577,23 +3057,32 @@ def test_generate_id_token_rest_required_fields(request_type=common.GenerateIdTo return_value = common.GenerateIdTokenResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.generate_id_token(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_generate_id_token_rest_unset_required_fields(): - transport = transports.IAMCredentialsRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.IAMCredentialsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.generate_id_token._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "audience", ))) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "audience", + ) + ) + ) def test_generate_id_token_rest_flattened(): @@ -2603,18 +3092,18 @@ def test_generate_id_token_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = common.GenerateIdTokenResponse() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/serviceAccounts/sample2'} + sample_request = {"name": "projects/sample1/serviceAccounts/sample2"} # get truthy value for each flattened field mock_args = dict( - name='name_value', - delegates=['delegates_value'], - audience='audience_value', + name="name_value", + delegates=["delegates_value"], + audience="audience_value", include_email=True, ) mock_args.update(sample_request) @@ -2625,7 +3114,7 @@ def test_generate_id_token_rest_flattened(): # Convert return value to protobuf type return_value = common.GenerateIdTokenResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -2635,10 +3124,14 @@ def test_generate_id_token_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/serviceAccounts/*}:generateIdToken" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/serviceAccounts/*}:generateIdToken" + % client.transport._host, + args[1], + ) -def test_generate_id_token_rest_flattened_error(transport: str = 'rest'): +def test_generate_id_token_rest_flattened_error(transport: str = "rest"): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2649,9 +3142,9 @@ def test_generate_id_token_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.generate_id_token( common.GenerateIdTokenRequest(), - name='name_value', - delegates=['delegates_value'], - audience='audience_value', + name="name_value", + delegates=["delegates_value"], + audience="audience_value", include_email=True, ) @@ -2674,7 +3167,9 @@ def test_sign_blob_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.sign_blob] = mock_rpc request = {} @@ -2695,56 +3190,59 @@ def test_sign_blob_rest_required_fields(request_type=common.SignBlobRequest): request_init = {} request_init["name"] = "" - request_init["payload"] = b'' + request_init["payload"] = b"" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).sign_blob._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).sign_blob._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' - jsonified_request["payload"] = b'payload_blob' + jsonified_request["name"] = "name_value" + jsonified_request["payload"] = b"payload_blob" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).sign_blob._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).sign_blob._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" assert "payload" in jsonified_request - assert jsonified_request["payload"] == b'payload_blob' + assert jsonified_request["payload"] == b"payload_blob" client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + 
transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = common.SignBlobResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -2754,23 +3252,32 @@ def test_sign_blob_rest_required_fields(request_type=common.SignBlobRequest): return_value = common.SignBlobResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.sign_blob(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_sign_blob_rest_unset_required_fields(): - transport = transports.IAMCredentialsRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.IAMCredentialsRestTransport( + 
credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.sign_blob._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "payload", ))) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "payload", + ) + ) + ) def test_sign_blob_rest_flattened(): @@ -2780,18 +3287,18 @@ def test_sign_blob_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = common.SignBlobResponse() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/serviceAccounts/sample2'} + sample_request = {"name": "projects/sample1/serviceAccounts/sample2"} # get truthy value for each flattened field mock_args = dict( - name='name_value', - delegates=['delegates_value'], - payload=b'payload_blob', + name="name_value", + delegates=["delegates_value"], + payload=b"payload_blob", ) mock_args.update(sample_request) @@ -2801,7 +3308,7 @@ def test_sign_blob_rest_flattened(): # Convert return value to protobuf type return_value = common.SignBlobResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -2811,10 +3318,14 @@ def test_sign_blob_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/serviceAccounts/*}:signBlob" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/serviceAccounts/*}:signBlob" + % client.transport._host, + args[1], + ) -def test_sign_blob_rest_flattened_error(transport: str = 'rest'): +def test_sign_blob_rest_flattened_error(transport: str = "rest"): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2825,9 +3336,9 @@ def test_sign_blob_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.sign_blob( common.SignBlobRequest(), - name='name_value', - delegates=['delegates_value'], - payload=b'payload_blob', + name="name_value", + delegates=["delegates_value"], + payload=b"payload_blob", ) @@ -2849,7 +3360,9 @@ def test_sign_jwt_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.sign_jwt] = mock_rpc request = {} @@ -2873,53 +3386,56 @@ def test_sign_jwt_rest_required_fields(request_type=common.SignJwtRequest): request_init["payload"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).sign_jwt._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).sign_jwt._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' - jsonified_request["payload"] = 'payload_value' + jsonified_request["name"] = "name_value" + jsonified_request["payload"] = "payload_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).sign_jwt._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).sign_jwt._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" assert "payload" in jsonified_request - assert jsonified_request["payload"] == 'payload_value' + assert jsonified_request["payload"] == "payload_value" client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate 
value for the returned response. return_value = common.SignJwtResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -2929,23 +3445,32 @@ def test_sign_jwt_rest_required_fields(request_type=common.SignJwtRequest): return_value = common.SignJwtResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.sign_jwt(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_sign_jwt_rest_unset_required_fields(): - transport = transports.IAMCredentialsRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.IAMCredentialsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = 
transport.sign_jwt._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "payload", ))) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "payload", + ) + ) + ) def test_sign_jwt_rest_flattened(): @@ -2955,18 +3480,18 @@ def test_sign_jwt_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = common.SignJwtResponse() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/serviceAccounts/sample2'} + sample_request = {"name": "projects/sample1/serviceAccounts/sample2"} # get truthy value for each flattened field mock_args = dict( - name='name_value', - delegates=['delegates_value'], - payload='payload_value', + name="name_value", + delegates=["delegates_value"], + payload="payload_value", ) mock_args.update(sample_request) @@ -2976,7 +3501,7 @@ def test_sign_jwt_rest_flattened(): # Convert return value to protobuf type return_value = common.SignJwtResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -2986,10 +3511,14 @@ def test_sign_jwt_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/serviceAccounts/*}:signJwt" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/serviceAccounts/*}:signJwt" + % client.transport._host, + args[1], + ) -def test_sign_jwt_rest_flattened_error(transport: str = 'rest'): +def test_sign_jwt_rest_flattened_error(transport: str = "rest"): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3000,9 +3529,9 @@ def test_sign_jwt_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.sign_jwt( common.SignJwtRequest(), - name='name_value', - delegates=['delegates_value'], - payload='payload_value', + name="name_value", + delegates=["delegates_value"], + payload="payload_value", ) @@ -3044,8 +3573,7 @@ def test_credentials_transport_error(): options.api_key = "api_key" with pytest.raises(ValueError): client = IAMCredentialsClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() + client_options=options, credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. @@ -3067,6 +3595,7 @@ def test_transport_instance(): client = IAMCredentialsClient(transport=transport) assert client.transport is transport + def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.IAMCredentialsGrpcTransport( @@ -3081,18 +3610,23 @@ def test_transport_get_channel(): channel = transport.grpc_channel assert channel -@pytest.mark.parametrize("transport_class", [ - transports.IAMCredentialsGrpcTransport, - transports.IAMCredentialsGrpcAsyncIOTransport, - transports.IAMCredentialsRestTransport, -]) + +@pytest.mark.parametrize( + "transport_class", + [ + transports.IAMCredentialsGrpcTransport, + transports.IAMCredentialsGrpcAsyncIOTransport, + transports.IAMCredentialsRestTransport, + ], +) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() + def test_transport_kind_grpc(): transport = IAMCredentialsClient.get_transport_class("grpc")( credentials=ga_credentials.AnonymousCredentials() @@ -3102,8 +3636,7 @@ def test_transport_kind_grpc(): def test_initialize_client_w_grpc(): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" ) assert client is not None @@ -3118,8 +3651,8 @@ def test_generate_access_token_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.generate_access_token), - '__call__') as call: + type(client.transport.generate_access_token), "__call__" + ) as call: call.return_value = common.GenerateAccessTokenResponse() client.generate_access_token(request=None) @@ -3141,8 +3674,8 @@ def test_generate_id_token_empty_call_grpc(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.generate_id_token), - '__call__') as call: + type(client.transport.generate_id_token), "__call__" + ) as call: call.return_value = common.GenerateIdTokenResponse() client.generate_id_token(request=None) @@ -3163,9 +3696,7 @@ def test_sign_blob_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.sign_blob), - '__call__') as call: + with mock.patch.object(type(client.transport.sign_blob), "__call__") as call: call.return_value = common.SignBlobResponse() client.sign_blob(request=None) @@ -3186,9 +3717,7 @@ def test_sign_jwt_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.sign_jwt), - '__call__') as call: + with mock.patch.object(type(client.transport.sign_jwt), "__call__") as call: call.return_value = common.SignJwtResponse() client.sign_jwt(request=None) @@ -3209,8 +3738,7 @@ def test_transport_kind_grpc_asyncio(): def test_initialize_client_w_grpc_asyncio(): client = IAMCredentialsAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" + credentials=async_anonymous_credentials(), transport="grpc_asyncio" ) assert client is not None @@ -3226,12 +3754,14 @@ async def test_generate_access_token_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.generate_access_token), - '__call__') as call: + type(client.transport.generate_access_token), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.GenerateAccessTokenResponse( - access_token='access_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + common.GenerateAccessTokenResponse( + access_token="access_token_value", + ) + ) await client.generate_access_token(request=None) # Establish that the underlying stub method was called. @@ -3253,12 +3783,14 @@ async def test_generate_id_token_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.generate_id_token), - '__call__') as call: + type(client.transport.generate_id_token), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.GenerateIdTokenResponse( - token='token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + common.GenerateIdTokenResponse( + token="token_value", + ) + ) await client.generate_id_token(request=None) # Establish that the underlying stub method was called. @@ -3279,14 +3811,14 @@ async def test_sign_blob_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.sign_blob), - '__call__') as call: + with mock.patch.object(type(client.transport.sign_blob), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.SignBlobResponse( - key_id='key_id_value', - signed_blob=b'signed_blob_blob', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + common.SignBlobResponse( + key_id="key_id_value", + signed_blob=b"signed_blob_blob", + ) + ) await client.sign_blob(request=None) # Establish that the underlying stub method was called. @@ -3307,14 +3839,14 @@ async def test_sign_jwt_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.sign_jwt), - '__call__') as call: + with mock.patch.object(type(client.transport.sign_jwt), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.SignJwtResponse( - key_id='key_id_value', - signed_jwt='signed_jwt_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + common.SignJwtResponse( + key_id="key_id_value", + signed_jwt="signed_jwt_value", + ) + ) await client.sign_jwt(request=None) # Establish that the underlying stub method was called. @@ -3332,20 +3864,24 @@ def test_transport_kind_rest(): assert transport.kind == "rest" -def test_generate_access_token_rest_bad_request(request_type=common.GenerateAccessTokenRequest): +def test_generate_access_token_rest_bad_request( + request_type=common.GenerateAccessTokenRequest, +): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request_init = {"name": "projects/sample1/serviceAccounts/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -3354,25 +3890,27 @@ def test_generate_access_token_rest_bad_request(request_type=common.GenerateAcce client.generate_access_token(request) -@pytest.mark.parametrize("request_type", [ - common.GenerateAccessTokenRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + common.GenerateAccessTokenRequest, + dict, + ], +) def test_generate_access_token_rest_call_success(request_type): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request_init = {"name": "projects/sample1/serviceAccounts/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = common.GenerateAccessTokenResponse( - access_token='access_token_value', + access_token="access_token_value", ) # Wrap the value into a proper Response obj @@ -3382,33 +3920,46 @@ def test_generate_access_token_rest_call_success(request_type): # Convert return value to protobuf type return_value = common.GenerateAccessTokenResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.generate_access_token(request) # Establish that the response is the type that we expect. assert isinstance(response, common.GenerateAccessTokenResponse) - assert response.access_token == 'access_token_value' + assert response.access_token == "access_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_generate_access_token_rest_interceptors(null_interceptor): transport = transports.IAMCredentialsRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.IAMCredentialsRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.IAMCredentialsRestInterceptor(), + ) client = IAMCredentialsClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_generate_access_token") as post, \ - mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_generate_access_token_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.IAMCredentialsRestInterceptor, "pre_generate_access_token") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, 
"transcode") as transcode, + mock.patch.object( + transports.IAMCredentialsRestInterceptor, "post_generate_access_token" + ) as post, + mock.patch.object( + transports.IAMCredentialsRestInterceptor, + "post_generate_access_token_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.IAMCredentialsRestInterceptor, "pre_generate_access_token" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = common.GenerateAccessTokenRequest.pb(common.GenerateAccessTokenRequest()) + pb_message = common.GenerateAccessTokenRequest.pb( + common.GenerateAccessTokenRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -3419,11 +3970,13 @@ def test_generate_access_token_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = common.GenerateAccessTokenResponse.to_json(common.GenerateAccessTokenResponse()) + return_value = common.GenerateAccessTokenResponse.to_json( + common.GenerateAccessTokenResponse() + ) req.return_value.content = return_value request = common.GenerateAccessTokenRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -3431,7 +3984,13 @@ def test_generate_access_token_rest_interceptors(null_interceptor): post.return_value = common.GenerateAccessTokenResponse() post_with_metadata.return_value = common.GenerateAccessTokenResponse(), metadata - client.generate_access_token(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.generate_access_token( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -3440,18 +3999,20 @@ def test_generate_access_token_rest_interceptors(null_interceptor): def test_generate_id_token_rest_bad_request(request_type=common.GenerateIdTokenRequest): client = 
IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request_init = {"name": "projects/sample1/serviceAccounts/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -3460,25 +4021,27 @@ def test_generate_id_token_rest_bad_request(request_type=common.GenerateIdTokenR client.generate_id_token(request) -@pytest.mark.parametrize("request_type", [ - common.GenerateIdTokenRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + common.GenerateIdTokenRequest, + dict, + ], +) def test_generate_id_token_rest_call_success(request_type): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request_init = {"name": "projects/sample1/serviceAccounts/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = common.GenerateIdTokenResponse( - token='token_value', + token="token_value", ) # Wrap the value into a proper Response obj @@ -3488,29 +4051,40 @@ def test_generate_id_token_rest_call_success(request_type): # Convert return value to protobuf type return_value = common.GenerateIdTokenResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.generate_id_token(request) # Establish that the response is the type that we expect. assert isinstance(response, common.GenerateIdTokenResponse) - assert response.token == 'token_value' + assert response.token == "token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_generate_id_token_rest_interceptors(null_interceptor): transport = transports.IAMCredentialsRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.IAMCredentialsRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.IAMCredentialsRestInterceptor(), + ) client = IAMCredentialsClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_generate_id_token") as post, \ - mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_generate_id_token_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.IAMCredentialsRestInterceptor, "pre_generate_id_token") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.IAMCredentialsRestInterceptor, 
"post_generate_id_token" + ) as post, + mock.patch.object( + transports.IAMCredentialsRestInterceptor, + "post_generate_id_token_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.IAMCredentialsRestInterceptor, "pre_generate_id_token" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -3525,11 +4099,13 @@ def test_generate_id_token_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = common.GenerateIdTokenResponse.to_json(common.GenerateIdTokenResponse()) + return_value = common.GenerateIdTokenResponse.to_json( + common.GenerateIdTokenResponse() + ) req.return_value.content = return_value request = common.GenerateIdTokenRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -3537,7 +4113,13 @@ def test_generate_id_token_rest_interceptors(null_interceptor): post.return_value = common.GenerateIdTokenResponse() post_with_metadata.return_value = common.GenerateIdTokenResponse(), metadata - client.generate_id_token(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.generate_id_token( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -3546,18 +4128,20 @@ def test_generate_id_token_rest_interceptors(null_interceptor): def test_sign_blob_rest_bad_request(request_type=common.SignBlobRequest): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request_init = {"name": "projects/sample1/serviceAccounts/sample2"} request = request_type(**request_init) # Mock the http request call within the 
method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -3566,26 +4150,28 @@ def test_sign_blob_rest_bad_request(request_type=common.SignBlobRequest): client.sign_blob(request) -@pytest.mark.parametrize("request_type", [ - common.SignBlobRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + common.SignBlobRequest, + dict, + ], +) def test_sign_blob_rest_call_success(request_type): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request_init = {"name": "projects/sample1/serviceAccounts/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = common.SignBlobResponse( - key_id='key_id_value', - signed_blob=b'signed_blob_blob', + key_id="key_id_value", + signed_blob=b"signed_blob_blob", ) # Wrap the value into a proper Response obj @@ -3595,30 +4181,40 @@ def test_sign_blob_rest_call_success(request_type): # Convert return value to protobuf type return_value = common.SignBlobResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.sign_blob(request) # Establish that the response is the type that we expect. assert isinstance(response, common.SignBlobResponse) - assert response.key_id == 'key_id_value' - assert response.signed_blob == b'signed_blob_blob' + assert response.key_id == "key_id_value" + assert response.signed_blob == b"signed_blob_blob" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_sign_blob_rest_interceptors(null_interceptor): transport = transports.IAMCredentialsRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.IAMCredentialsRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.IAMCredentialsRestInterceptor(), + ) client = IAMCredentialsClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_sign_blob") as post, \ - mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_sign_blob_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.IAMCredentialsRestInterceptor, "pre_sign_blob") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + 
mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.IAMCredentialsRestInterceptor, "post_sign_blob" + ) as post, + mock.patch.object( + transports.IAMCredentialsRestInterceptor, "post_sign_blob_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.IAMCredentialsRestInterceptor, "pre_sign_blob" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -3637,7 +4233,7 @@ def test_sign_blob_rest_interceptors(null_interceptor): req.return_value.content = return_value request = common.SignBlobRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -3645,7 +4241,13 @@ def test_sign_blob_rest_interceptors(null_interceptor): post.return_value = common.SignBlobResponse() post_with_metadata.return_value = common.SignBlobResponse(), metadata - client.sign_blob(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.sign_blob( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -3654,18 +4256,20 @@ def test_sign_blob_rest_interceptors(null_interceptor): def test_sign_jwt_rest_bad_request(request_type=common.SignJwtRequest): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request_init = {"name": "projects/sample1/serviceAccounts/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -3674,26 +4278,28 @@ def test_sign_jwt_rest_bad_request(request_type=common.SignJwtRequest): client.sign_jwt(request) -@pytest.mark.parametrize("request_type", [ - common.SignJwtRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + common.SignJwtRequest, + dict, + ], +) def test_sign_jwt_rest_call_success(request_type): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request_init = {"name": "projects/sample1/serviceAccounts/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = common.SignJwtResponse( - key_id='key_id_value', - signed_jwt='signed_jwt_value', + key_id="key_id_value", + signed_jwt="signed_jwt_value", ) # Wrap the value into a proper Response obj @@ -3703,30 +4309,40 @@ def test_sign_jwt_rest_call_success(request_type): # Convert return value to protobuf type return_value = common.SignJwtResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.sign_jwt(request) # Establish that the response is the type that we expect. assert isinstance(response, common.SignJwtResponse) - assert response.key_id == 'key_id_value' - assert response.signed_jwt == 'signed_jwt_value' + assert response.key_id == "key_id_value" + assert response.signed_jwt == "signed_jwt_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_sign_jwt_rest_interceptors(null_interceptor): transport = transports.IAMCredentialsRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.IAMCredentialsRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.IAMCredentialsRestInterceptor(), + ) client = IAMCredentialsClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_sign_jwt") as post, \ - mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_sign_jwt_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.IAMCredentialsRestInterceptor, "pre_sign_jwt") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + 
mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.IAMCredentialsRestInterceptor, "post_sign_jwt" + ) as post, + mock.patch.object( + transports.IAMCredentialsRestInterceptor, "post_sign_jwt_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.IAMCredentialsRestInterceptor, "pre_sign_jwt" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -3745,7 +4361,7 @@ def test_sign_jwt_rest_interceptors(null_interceptor): req.return_value.content = return_value request = common.SignJwtRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -3753,16 +4369,22 @@ def test_sign_jwt_rest_interceptors(null_interceptor): post.return_value = common.SignJwtResponse() post_with_metadata.return_value = common.SignJwtResponse(), metadata - client.sign_jwt(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.sign_jwt( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() + def test_initialize_client_w_rest(): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) assert client is not None @@ -3777,8 +4399,8 @@ def test_generate_access_token_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.generate_access_token), - '__call__') as call: + type(client.transport.generate_access_token), "__call__" + ) as call: client.generate_access_token(request=None) # Establish that the underlying stub method was called. @@ -3799,8 +4421,8 @@ def test_generate_id_token_empty_call_rest(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.generate_id_token), - '__call__') as call: + type(client.transport.generate_id_token), "__call__" + ) as call: client.generate_id_token(request=None) # Establish that the underlying stub method was called. @@ -3820,9 +4442,7 @@ def test_sign_blob_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.sign_blob), - '__call__') as call: + with mock.patch.object(type(client.transport.sign_blob), "__call__") as call: client.sign_blob(request=None) # Establish that the underlying stub method was called. @@ -3842,9 +4462,7 @@ def test_sign_jwt_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.sign_jwt), - '__call__') as call: + with mock.patch.object(type(client.transport.sign_jwt), "__call__") as call: client.sign_jwt(request=None) # Establish that the underlying stub method was called. @@ -3865,18 +4483,21 @@ def test_transport_grpc_default(): transports.IAMCredentialsGrpcTransport, ) + def test_iam_credentials_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.IAMCredentialsTransport( credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" + credentials_file="credentials.json", ) def test_iam_credentials_base_transport(): # Instantiate the base transport. 
- with mock.patch('google.iam.credentials_v1.services.iam_credentials.transports.IAMCredentialsTransport.__init__') as Transport: + with mock.patch( + "google.iam.credentials_v1.services.iam_credentials.transports.IAMCredentialsTransport.__init__" + ) as Transport: Transport.return_value = None transport = transports.IAMCredentialsTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -3885,10 +4506,10 @@ def test_iam_credentials_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. methods = ( - 'generate_access_token', - 'generate_id_token', - 'sign_blob', - 'sign_jwt', + "generate_access_token", + "generate_id_token", + "sign_blob", + "sign_jwt", ) for method in methods: with pytest.raises(NotImplementedError): @@ -3899,7 +4520,7 @@ def test_iam_credentials_base_transport(): # Catch all for all remaining methods and properties remainder = [ - 'kind', + "kind", ] for r in remainder: with pytest.raises(NotImplementedError): @@ -3908,25 +4529,36 @@ def test_iam_credentials_base_transport(): def test_iam_credentials_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.iam.credentials_v1.services.iam_credentials.transports.IAMCredentialsTransport._prep_wrapped_messages') as Transport: + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch( + "google.iam.credentials_v1.services.iam_credentials.transports.IAMCredentialsTransport._prep_wrapped_messages" + ) as Transport, + ): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.IAMCredentialsTransport( credentials_file="credentials.json", quota_project_id="octopus", ) - load_creds.assert_called_once_with("credentials.json", + load_creds.assert_called_once_with( + 
"credentials.json", scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) def test_iam_credentials_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.iam.credentials_v1.services.iam_credentials.transports.IAMCredentialsTransport._prep_wrapped_messages') as Transport: + with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch( + "google.iam.credentials_v1.services.iam_credentials.transports.IAMCredentialsTransport._prep_wrapped_messages" + ) as Transport, + ): Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.IAMCredentialsTransport() @@ -3935,14 +4567,12 @@ def test_iam_credentials_base_transport_with_adc(): def test_iam_credentials_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) IAMCredentialsClient() adc.assert_called_once_with( scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id=None, ) @@ -3957,12 +4587,12 @@ def test_iam_credentials_auth_adc(): def test_iam_credentials_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) @@ -3976,48 +4606,46 @@ def test_iam_credentials_transport_auth_adc(transport_class): ], ) def test_iam_credentials_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) adc.return_value = (gdch_mock, None) transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) + gdch_mock.with_gdch_audience.assert_called_once_with(e) @pytest.mark.parametrize( "transport_class,grpc_helpers", [ (transports.IAMCredentialsGrpcTransport, grpc_helpers), - (transports.IAMCredentialsGrpcAsyncIOTransport, grpc_helpers_async) + (transports.IAMCredentialsGrpcAsyncIOTransport, grpc_helpers_async), ], ) def test_iam_credentials_transport_create_channel(transport_class, grpc_helpers): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: + with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel, + ): creds = ga_credentials.AnonymousCredentials() adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) create_channel.assert_called_with( "iamcredentials.googleapis.com:443", credentials=creds, credentials_file=None, quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), scopes=["1", "2"], default_host="iamcredentials.googleapis.com", ssl_credentials=None, @@ -4028,10 +4656,14 @@ def test_iam_credentials_transport_create_channel(transport_class, grpc_helpers) ) -@pytest.mark.parametrize("transport_class", [transports.IAMCredentialsGrpcTransport, transports.IAMCredentialsGrpcAsyncIOTransport]) -def test_iam_credentials_grpc_transport_client_cert_source_for_mtls( - transport_class -): +@pytest.mark.parametrize( + "transport_class", + [ + transports.IAMCredentialsGrpcTransport, + transports.IAMCredentialsGrpcAsyncIOTransport, + ], +) +def test_iam_credentials_grpc_transport_client_cert_source_for_mtls(transport_class): cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. 
@@ -4040,7 +4672,7 @@ def test_iam_credentials_grpc_transport_client_cert_source_for_mtls( transport_class( host="squid.clam.whelk", credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds + ssl_channel_credentials=mock_ssl_channel_creds, ) mock_create_channel.assert_called_once_with( "squid.clam.whelk:443", @@ -4061,61 +4693,77 @@ def test_iam_credentials_grpc_transport_client_cert_source_for_mtls( with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: transport_class( credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback + client_cert_source_for_mtls=client_cert_source_callback, ) expected_cert, expected_key = client_cert_source_callback() mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key + certificate_chain=expected_cert, private_key=expected_key ) + def test_iam_credentials_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.IAMCredentialsRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.IAMCredentialsRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) def test_iam_credentials_host_no_port(transport_name): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='iamcredentials.googleapis.com'), - transport=transport_name, + 
client_options=client_options.ClientOptions( + api_endpoint="iamcredentials.googleapis.com" + ), + transport=transport_name, ) assert client.transport._host == ( - 'iamcredentials.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://iamcredentials.googleapis.com' + "iamcredentials.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://iamcredentials.googleapis.com" ) -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) def test_iam_credentials_host_with_port(transport_name): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='iamcredentials.googleapis.com:8000'), + client_options=client_options.ClientOptions( + api_endpoint="iamcredentials.googleapis.com:8000" + ), transport=transport_name, ) assert client.transport._host == ( - 'iamcredentials.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://iamcredentials.googleapis.com:8000' + "iamcredentials.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://iamcredentials.googleapis.com:8000" ) -@pytest.mark.parametrize("transport_name", [ - "rest", -]) + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) def test_iam_credentials_client_transport_session_collision(transport_name): creds1 = ga_credentials.AnonymousCredentials() creds2 = ga_credentials.AnonymousCredentials() @@ -4139,8 +4787,10 @@ def test_iam_credentials_client_transport_session_collision(transport_name): session1 = client1.transport.sign_jwt._session session2 = client2.transport.sign_jwt._session assert session1 != session2 + + def test_iam_credentials_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + channel = 
grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.IAMCredentialsGrpcTransport( @@ -4153,7 +4803,7 @@ def test_iam_credentials_grpc_transport_channel(): def test_iam_credentials_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.IAMCredentialsGrpcAsyncIOTransport( @@ -4168,12 +4818,22 @@ def test_iam_credentials_grpc_asyncio_transport_channel(): # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. @pytest.mark.filterwarnings("ignore::FutureWarning") -@pytest.mark.parametrize("transport_class", [transports.IAMCredentialsGrpcTransport, transports.IAMCredentialsGrpcAsyncIOTransport]) +@pytest.mark.parametrize( + "transport_class", + [ + transports.IAMCredentialsGrpcTransport, + transports.IAMCredentialsGrpcAsyncIOTransport, + ], +) def test_iam_credentials_transport_channel_mtls_with_client_cert_source( - transport_class + transport_class, ): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -4182,7 +4842,7 @@ def test_iam_credentials_transport_channel_mtls_with_client_cert_source( cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: 
adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -4212,17 +4872,23 @@ def test_iam_credentials_transport_channel_mtls_with_client_cert_source( # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.IAMCredentialsGrpcTransport, transports.IAMCredentialsGrpcAsyncIOTransport]) -def test_iam_credentials_transport_channel_mtls_with_adc( - transport_class -): +@pytest.mark.parametrize( + "transport_class", + [ + transports.IAMCredentialsGrpcTransport, + transports.IAMCredentialsGrpcAsyncIOTransport, + ], +) +def test_iam_credentials_transport_channel_mtls_with_adc(transport_class): mock_ssl_cred = mock.Mock() with mock.patch.multiple( "google.auth.transport.grpc.SslCredentials", __init__=mock.Mock(return_value=None), ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel mock_cred = mock.Mock() @@ -4253,7 +4919,10 @@ def test_iam_credentials_transport_channel_mtls_with_adc( def test_service_account_path(): project = "squid" service_account = "clam" - expected = "projects/{project}/serviceAccounts/{service_account}".format(project=project, service_account=service_account, ) + expected = "projects/{project}/serviceAccounts/{service_account}".format( + project=project, + service_account=service_account, + ) actual = IAMCredentialsClient.service_account_path(project, service_account) assert expected == actual @@ -4269,9 +4938,12 @@ def test_parse_service_account_path(): actual = IAMCredentialsClient.parse_service_account_path(path) assert expected == actual + def test_common_billing_account_path(): billing_account = "oyster" - 
expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) actual = IAMCredentialsClient.common_billing_account_path(billing_account) assert expected == actual @@ -4286,9 +4958,12 @@ def test_parse_common_billing_account_path(): actual = IAMCredentialsClient.parse_common_billing_account_path(path) assert expected == actual + def test_common_folder_path(): folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) + expected = "folders/{folder}".format( + folder=folder, + ) actual = IAMCredentialsClient.common_folder_path(folder) assert expected == actual @@ -4303,9 +4978,12 @@ def test_parse_common_folder_path(): actual = IAMCredentialsClient.parse_common_folder_path(path) assert expected == actual + def test_common_organization_path(): organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = IAMCredentialsClient.common_organization_path(organization) assert expected == actual @@ -4320,9 +4998,12 @@ def test_parse_common_organization_path(): actual = IAMCredentialsClient.parse_common_organization_path(path) assert expected == actual + def test_common_project_path(): project = "scallop" - expected = "projects/{project}".format(project=project, ) + expected = "projects/{project}".format( + project=project, + ) actual = IAMCredentialsClient.common_project_path(project) assert expected == actual @@ -4337,10 +5018,14 @@ def test_parse_common_project_path(): actual = IAMCredentialsClient.parse_common_project_path(path) assert expected == actual + def test_common_location_path(): project = "squid" location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + expected = "projects/{project}/locations/{location}".format( + 
project=project, + location=location, + ) actual = IAMCredentialsClient.common_location_path(project, location) assert expected == actual @@ -4360,14 +5045,18 @@ def test_parse_common_location_path(): def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() - with mock.patch.object(transports.IAMCredentialsTransport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.IAMCredentialsTransport, "_prep_wrapped_messages" + ) as prep: client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) - with mock.patch.object(transports.IAMCredentialsTransport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.IAMCredentialsTransport, "_prep_wrapped_messages" + ) as prep: transport_class = IAMCredentialsClient.get_transport_class() transport = transport_class( credentials=ga_credentials.AnonymousCredentials(), @@ -4378,10 +5067,11 @@ def test_client_with_default_client_info(): def test_transport_close_grpc(): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: with client: close.assert_not_called() close.assert_called_once() @@ -4390,10 +5080,11 @@ def test_transport_close_grpc(): @pytest.mark.asyncio async def test_transport_close_grpc_asyncio(): client = IAMCredentialsAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" + credentials=async_anonymous_credentials(), transport="grpc_asyncio" ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), 
"close" + ) as close: async with client: close.assert_not_called() close.assert_called_once() @@ -4401,10 +5092,11 @@ async def test_transport_close_grpc_asyncio(): def test_transport_close_rest(): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: with client: close.assert_not_called() close.assert_called_once() @@ -4412,13 +5104,12 @@ def test_transport_close_rest(): def test_client_ctx(): transports = [ - 'rest', - 'grpc', + "rest", + "grpc", ] for transport in transports: client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. 
with mock.patch.object(type(client.transport), "close") as close: @@ -4427,10 +5118,14 @@ def test_client_ctx(): pass close.assert_called() -@pytest.mark.parametrize("client_class,transport_class", [ - (IAMCredentialsClient, transports.IAMCredentialsGrpcTransport), - (IAMCredentialsAsyncClient, transports.IAMCredentialsGrpcAsyncIOTransport), -]) + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (IAMCredentialsClient, transports.IAMCredentialsGrpcTransport), + (IAMCredentialsAsyncClient, transports.IAMCredentialsGrpcAsyncIOTransport), + ], +) def test_api_key_credentials(client_class, transport_class): with mock.patch.object( google.auth._default, "get_api_key_credentials", create=True @@ -4445,7 +5140,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py index 7960c88ae9cd..1d1df26159c0 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py @@ -28,7 +28,6 @@ import os import shlex import sys -import logging from typing import Any # If extensions (or modules to document with autodoc) are in another directory, @@ -83,9 +82,9 @@ root_doc = "index" # General information about the project. 
-project = u"google-cloud-eventarc" -copyright = u"2025, Google, LLC" -author = u"Google APIs" +project = "google-cloud-eventarc" +copyright = "2025, Google, LLC" +author = "Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -285,7 +284,7 @@ ( root_doc, "google-cloud-eventarc.tex", - u"google-cloud-eventarc Documentation", + "google-cloud-eventarc Documentation", author, "manual", ) @@ -386,6 +385,7 @@ napoleon_use_param = True napoleon_use_rtype = True + # Setup for sphinx behaviors such as warning filters. class UnexpectedUnindentFilter(logging.Filter): """Filter out warnings about unexpected unindentation following bullet lists.""" @@ -413,5 +413,5 @@ def setup(app: Any) -> None: """ # Sphinx's logger is hierarchical. Adding a filter to the # root 'sphinx' logger will catch warnings from all sub-loggers. - logger = logging.getLogger('sphinx') + logger = logging.getLogger("sphinx") logger.addFilter(UnexpectedUnindentFilter()) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py index 20d981ec3ecc..8f1fd358103f 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.cloud.eventarc_v1 import gapic_version as package_version +import sys import google.api_core as api_core -import sys +from google.cloud.eventarc_v1 import gapic_version as package_version __version__ = package_version.__version__ @@ -28,74 +28,80 @@ import importlib_metadata as metadata -from .services.eventarc import EventarcClient -from .services.eventarc import EventarcAsyncClient - +from .services.eventarc import EventarcAsyncClient, EventarcClient from .types.channel import Channel from .types.channel_connection import ChannelConnection -from .types.discovery import EventType -from .types.discovery import FilteringAttribute -from .types.discovery import Provider -from .types.eventarc import CreateChannelConnectionRequest -from .types.eventarc import CreateChannelRequest -from .types.eventarc import CreateTriggerRequest -from .types.eventarc import DeleteChannelConnectionRequest -from .types.eventarc import DeleteChannelRequest -from .types.eventarc import DeleteTriggerRequest -from .types.eventarc import GetChannelConnectionRequest -from .types.eventarc import GetChannelRequest -from .types.eventarc import GetGoogleChannelConfigRequest -from .types.eventarc import GetProviderRequest -from .types.eventarc import GetTriggerRequest -from .types.eventarc import ListChannelConnectionsRequest -from .types.eventarc import ListChannelConnectionsResponse -from .types.eventarc import ListChannelsRequest -from .types.eventarc import ListChannelsResponse -from .types.eventarc import ListProvidersRequest -from .types.eventarc import ListProvidersResponse -from .types.eventarc import ListTriggersRequest -from .types.eventarc import ListTriggersResponse -from .types.eventarc import OperationMetadata -from .types.eventarc import UpdateChannelRequest -from .types.eventarc import UpdateGoogleChannelConfigRequest -from .types.eventarc import UpdateTriggerRequest +from .types.discovery import EventType, FilteringAttribute, Provider +from .types.eventarc import ( + 
CreateChannelConnectionRequest, + CreateChannelRequest, + CreateTriggerRequest, + DeleteChannelConnectionRequest, + DeleteChannelRequest, + DeleteTriggerRequest, + GetChannelConnectionRequest, + GetChannelRequest, + GetGoogleChannelConfigRequest, + GetProviderRequest, + GetTriggerRequest, + ListChannelConnectionsRequest, + ListChannelConnectionsResponse, + ListChannelsRequest, + ListChannelsResponse, + ListProvidersRequest, + ListProvidersResponse, + ListTriggersRequest, + ListTriggersResponse, + OperationMetadata, + UpdateChannelRequest, + UpdateGoogleChannelConfigRequest, + UpdateTriggerRequest, +) from .types.google_channel_config import GoogleChannelConfig -from .types.trigger import CloudRun -from .types.trigger import Destination -from .types.trigger import EventFilter -from .types.trigger import GKE -from .types.trigger import Pubsub -from .types.trigger import StateCondition -from .types.trigger import Transport -from .types.trigger import Trigger +from .types.trigger import ( + GKE, + CloudRun, + Destination, + EventFilter, + Pubsub, + StateCondition, + Transport, + Trigger, +) -if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER - api_core.check_python_version("google.cloud.eventarc_v1") # type: ignore - api_core.check_dependency_versions("google.cloud.eventarc_v1") # type: ignore -else: # pragma: NO COVER +if hasattr(api_core, "check_python_version") and hasattr( + api_core, "check_dependency_versions" +): # pragma: NO COVER + api_core.check_python_version("google.cloud.eventarc_v1") # type: ignore + api_core.check_dependency_versions("google.cloud.eventarc_v1") # type: ignore +else: # pragma: NO COVER # An older version of api_core is installed which does not define the # functions above. We do equivalent checks manually. 
try: - import warnings import sys + import warnings _py_version_str = sys.version.split()[0] _package_label = "google.cloud.eventarc_v1" if sys.version_info < (3, 9): - warnings.warn("You are using a non-supported Python version " + - f"({_py_version_str}). Google will not post any further " + - f"updates to {_package_label} supporting this Python version. " + - "Please upgrade to the latest Python version, or at " + - f"least to Python 3.9, and then update {_package_label}.", - FutureWarning) + warnings.warn( + "You are using a non-supported Python version " + + f"({_py_version_str}). Google will not post any further " + + f"updates to {_package_label} supporting this Python version. " + + "Please upgrade to the latest Python version, or at " + + f"least to Python 3.9, and then update {_package_label}.", + FutureWarning, + ) if sys.version_info[:2] == (3, 9): - warnings.warn(f"You are using a Python version ({_py_version_str}) " + - f"which Google will stop supporting in {_package_label} in " + - "January 2026. Please " + - "upgrade to the latest Python version, or at " + - "least to Python 3.10, before then, and " + - f"then update {_package_label}.", - FutureWarning) + warnings.warn( + f"You are using a Python version ({_py_version_str}) " + + f"which Google will stop supporting in {_package_label} in " + + "January 2026. Please " + + "upgrade to the latest Python version, or at " + + "least to Python 3.10, before then, and " + + f"then update {_package_label}.", + FutureWarning, + ) def parse_version_to_tuple(version_string: str): """Safely converts a semantic version string to a comparable tuple of integers. 
@@ -133,64 +139,68 @@ def _get_version(dependency_name): _recommendation = " (we recommend 6.x)" (_version_used, _version_used_string) = _get_version(_dependency_package) if _version_used and _version_used < _next_supported_version_tuple: - warnings.warn(f"Package {_package_label} depends on " + - f"{_dependency_package}, currently installed at version " + - f"{_version_used_string}. Future updates to " + - f"{_package_label} will require {_dependency_package} at " + - f"version {_next_supported_version} or higher{_recommendation}." + - " Please ensure " + - "that either (a) your Python environment doesn't pin the " + - f"version of {_dependency_package}, so that updates to " + - f"{_package_label} can require the higher version, or " + - "(b) you manually update your Python environment to use at " + - f"least version {_next_supported_version} of " + - f"{_dependency_package}.", - FutureWarning) + warnings.warn( + f"Package {_package_label} depends on " + + f"{_dependency_package}, currently installed at version " + + f"{_version_used_string}. Future updates to " + + f"{_package_label} will require {_dependency_package} at " + + f"version {_next_supported_version} or higher{_recommendation}." + + " Please ensure " + + "that either (a) your Python environment doesn't pin the " + + f"version of {_dependency_package}, so that updates to " + + f"{_package_label} can require the higher version, or " + + "(b) you manually update your Python environment to use at " + + f"least version {_next_supported_version} of " + + f"{_dependency_package}.", + FutureWarning, + ) except Exception: - warnings.warn("Could not determine the version of Python " + - "currently being used. To continue receiving " + - "updates for {_package_label}, ensure you are " + - "using a supported version of Python; see " + - "https://devguide.python.org/versions/") + warnings.warn( + "Could not determine the version of Python " + + "currently being used. 
To continue receiving " + + "updates for {_package_label}, ensure you are " + + "using a supported version of Python; see " + + "https://devguide.python.org/versions/" + ) __all__ = ( - 'EventarcAsyncClient', -'Channel', -'ChannelConnection', -'CloudRun', -'CreateChannelConnectionRequest', -'CreateChannelRequest', -'CreateTriggerRequest', -'DeleteChannelConnectionRequest', -'DeleteChannelRequest', -'DeleteTriggerRequest', -'Destination', -'EventFilter', -'EventType', -'EventarcClient', -'FilteringAttribute', -'GKE', -'GetChannelConnectionRequest', -'GetChannelRequest', -'GetGoogleChannelConfigRequest', -'GetProviderRequest', -'GetTriggerRequest', -'GoogleChannelConfig', -'ListChannelConnectionsRequest', -'ListChannelConnectionsResponse', -'ListChannelsRequest', -'ListChannelsResponse', -'ListProvidersRequest', -'ListProvidersResponse', -'ListTriggersRequest', -'ListTriggersResponse', -'OperationMetadata', -'Provider', -'Pubsub', -'StateCondition', -'Transport', -'Trigger', -'UpdateChannelRequest', -'UpdateGoogleChannelConfigRequest', -'UpdateTriggerRequest', + "EventarcAsyncClient", + "Channel", + "ChannelConnection", + "CloudRun", + "CreateChannelConnectionRequest", + "CreateChannelRequest", + "CreateTriggerRequest", + "DeleteChannelConnectionRequest", + "DeleteChannelRequest", + "DeleteTriggerRequest", + "Destination", + "EventFilter", + "EventType", + "EventarcClient", + "FilteringAttribute", + "GKE", + "GetChannelConnectionRequest", + "GetChannelRequest", + "GetGoogleChannelConfigRequest", + "GetProviderRequest", + "GetTriggerRequest", + "GoogleChannelConfig", + "ListChannelConnectionsRequest", + "ListChannelConnectionsResponse", + "ListChannelsRequest", + "ListChannelsResponse", + "ListProvidersRequest", + "ListProvidersResponse", + "ListTriggersRequest", + "ListTriggersResponse", + "OperationMetadata", + "Provider", + "Pubsub", + "StateCondition", + "Transport", + "Trigger", + "UpdateChannelRequest", + "UpdateGoogleChannelConfigRequest", + 
"UpdateTriggerRequest", ) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/__init__.py index c604e58a85da..10e752b01114 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/__init__.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from .client import EventarcClient from .async_client import EventarcAsyncClient +from .client import EventarcClient __all__ = ( - 'EventarcClient', - 'EventarcAsyncClient', + "EventarcClient", + "EventarcAsyncClient", ) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index 2b147003e696..99358fdf7e36 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -14,57 +14,75 @@ # limitations under the License. 
# import logging as std_logging -from collections import OrderedDict import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.eventarc_v1 import gapic_version as package_version +from collections import OrderedDict +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) -from google.api_core.client_options import ClientOptions +import google.protobuf from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.eventarc_v1 import gapic_version as package_version +from google.oauth2 import service_account # type: ignore try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore -from google.cloud.eventarc_v1.services.eventarc import pagers -from google.cloud.eventarc_v1.types import channel -from google.cloud.eventarc_v1.types import channel as gce_channel -from google.cloud.eventarc_v1.types import channel_connection -from google.cloud.eventarc_v1.types import channel_connection as gce_channel_connection -from google.cloud.eventarc_v1.types import discovery -from google.cloud.eventarc_v1.types import eventarc -from google.cloud.eventarc_v1.types import google_channel_config -from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config -from google.cloud.eventarc_v1.types import trigger -from google.cloud.eventarc_v1.types import trigger as 
gce_trigger -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore import google.api_core.operation as operation # type: ignore import google.api_core.operation_async as operation_async # type: ignore import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore -from .transports.base import EventarcTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import EventarcGrpcAsyncIOTransport +from google.cloud.eventarc_v1.services.eventarc import pagers +from google.cloud.eventarc_v1.types import ( + channel, + channel_connection, + discovery, + eventarc, + google_channel_config, + trigger, +) +from google.cloud.eventarc_v1.types import channel as gce_channel +from google.cloud.eventarc_v1.types import channel_connection as gce_channel_connection +from google.cloud.eventarc_v1.types import ( + google_channel_config as gce_google_channel_config, +) +from google.cloud.eventarc_v1.types import trigger as gce_trigger +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import ( + iam_policy_pb2, # type: ignore + policy_pb2, # type: ignore +) +from google.longrunning import operations_pb2 # type: ignore + from .client import EventarcClient +from .transports.base import DEFAULT_CLIENT_INFO, EventarcTransport +from .transports.grpc_asyncio import EventarcGrpcAsyncIOTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) + class EventarcAsyncClient: """Eventarc allows users to subscribe to various events that are provided by Google Cloud services and forward them to supported @@ -83,13 +101,17 @@ class 
EventarcAsyncClient: channel_path = staticmethod(EventarcClient.channel_path) parse_channel_path = staticmethod(EventarcClient.parse_channel_path) channel_connection_path = staticmethod(EventarcClient.channel_connection_path) - parse_channel_connection_path = staticmethod(EventarcClient.parse_channel_connection_path) + parse_channel_connection_path = staticmethod( + EventarcClient.parse_channel_connection_path + ) cloud_function_path = staticmethod(EventarcClient.cloud_function_path) parse_cloud_function_path = staticmethod(EventarcClient.parse_cloud_function_path) crypto_key_path = staticmethod(EventarcClient.crypto_key_path) parse_crypto_key_path = staticmethod(EventarcClient.parse_crypto_key_path) google_channel_config_path = staticmethod(EventarcClient.google_channel_config_path) - parse_google_channel_config_path = staticmethod(EventarcClient.parse_google_channel_config_path) + parse_google_channel_config_path = staticmethod( + EventarcClient.parse_google_channel_config_path + ) provider_path = staticmethod(EventarcClient.provider_path) parse_provider_path = staticmethod(EventarcClient.parse_provider_path) service_path = staticmethod(EventarcClient.service_path) @@ -100,12 +122,18 @@ class EventarcAsyncClient: parse_trigger_path = staticmethod(EventarcClient.parse_trigger_path) workflow_path = staticmethod(EventarcClient.workflow_path) parse_workflow_path = staticmethod(EventarcClient.parse_workflow_path) - common_billing_account_path = staticmethod(EventarcClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(EventarcClient.parse_common_billing_account_path) + common_billing_account_path = staticmethod( + EventarcClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + EventarcClient.parse_common_billing_account_path + ) common_folder_path = staticmethod(EventarcClient.common_folder_path) parse_common_folder_path = staticmethod(EventarcClient.parse_common_folder_path) 
common_organization_path = staticmethod(EventarcClient.common_organization_path) - parse_common_organization_path = staticmethod(EventarcClient.parse_common_organization_path) + parse_common_organization_path = staticmethod( + EventarcClient.parse_common_organization_path + ) common_project_path = staticmethod(EventarcClient.common_project_path) parse_common_project_path = staticmethod(EventarcClient.parse_common_project_path) common_location_path = staticmethod(EventarcClient.common_location_path) @@ -151,7 +179,9 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): """Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -214,12 +244,16 @@ def universe_domain(self) -> str: get_transport_class = EventarcClient.get_transport_class - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, EventarcTransport, Callable[..., EventarcTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, EventarcTransport, Callable[..., EventarcTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the eventarc async client. 
Args: @@ -277,31 +311,39 @@ def __init__(self, *, transport=transport, client_options=client_options, client_info=client_info, - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.eventarc_v1.EventarcAsyncClient`.", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { "serviceName": "google.cloud.eventarc.v1.Eventarc", "credentialsType": None, - } + }, ) - async def get_trigger(self, - request: Optional[Union[eventarc.GetTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> trigger.Trigger: + async def get_trigger( + self, + request: Optional[Union[eventarc.GetTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> trigger.Trigger: r"""Get a single trigger. .. 
code-block:: python @@ -359,10 +401,14 @@ async def sample_get_trigger(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -376,14 +422,14 @@ async def sample_get_trigger(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_trigger] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_trigger + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -400,14 +446,15 @@ async def sample_get_trigger(): # Done; return the response. 
return response - async def list_triggers(self, - request: Optional[Union[eventarc.ListTriggersRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListTriggersAsyncPager: + async def list_triggers( + self, + request: Optional[Union[eventarc.ListTriggersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListTriggersAsyncPager: r"""List triggers. .. code-block:: python @@ -468,10 +515,14 @@ async def sample_list_triggers(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -485,14 +536,14 @@ async def sample_list_triggers(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_triggers] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_triggers + ] # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -520,16 +571,17 @@ async def sample_list_triggers(): # Done; return the response. return response - async def create_trigger(self, - request: Optional[Union[eventarc.CreateTriggerRequest, dict]] = None, - *, - parent: Optional[str] = None, - trigger: Optional[gce_trigger.Trigger] = None, - trigger_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def create_trigger( + self, + request: Optional[Union[eventarc.CreateTriggerRequest, dict]] = None, + *, + parent: Optional[str] = None, + trigger: Optional[gce_trigger.Trigger] = None, + trigger_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Create a new trigger in a particular project and location. @@ -617,10 +669,14 @@ async def sample_create_trigger(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent, trigger, trigger_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -638,14 +694,14 @@ async def sample_create_trigger(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_trigger] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_trigger + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -670,16 +726,17 @@ async def sample_create_trigger(): # Done; return the response. return response - async def update_trigger(self, - request: Optional[Union[eventarc.UpdateTriggerRequest, dict]] = None, - *, - trigger: Optional[gce_trigger.Trigger] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - allow_missing: Optional[bool] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def update_trigger( + self, + request: Optional[Union[eventarc.UpdateTriggerRequest, dict]] = None, + *, + trigger: Optional[gce_trigger.Trigger] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + allow_missing: Optional[bool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Update a single trigger. .. 
code-block:: python @@ -759,10 +816,14 @@ async def sample_update_trigger(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [trigger, update_mask, allow_missing] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -780,14 +841,16 @@ async def sample_update_trigger(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_trigger] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_trigger + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("trigger.name", request.trigger.name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("trigger.name", request.trigger.name),) + ), ) # Validate the universe domain. @@ -812,15 +875,16 @@ async def sample_update_trigger(): # Done; return the response. 
return response - async def delete_trigger(self, - request: Optional[Union[eventarc.DeleteTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - allow_missing: Optional[bool] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def delete_trigger( + self, + request: Optional[Union[eventarc.DeleteTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + allow_missing: Optional[bool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Delete a single trigger. .. code-block:: python @@ -894,10 +958,14 @@ async def sample_delete_trigger(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name, allow_missing] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -913,14 +981,14 @@ async def sample_delete_trigger(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.delete_trigger] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_trigger + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -945,14 +1013,15 @@ async def sample_delete_trigger(): # Done; return the response. return response - async def get_channel(self, - request: Optional[Union[eventarc.GetChannelRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> channel.Channel: + async def get_channel( + self, + request: Optional[Union[eventarc.GetChannelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> channel.Channel: r"""Get a single Channel. .. code-block:: python @@ -1016,10 +1085,14 @@ async def sample_get_channel(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1033,14 +1106,14 @@ async def sample_get_channel(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_channel] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_channel + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1057,14 +1130,15 @@ async def sample_get_channel(): # Done; return the response. return response - async def list_channels(self, - request: Optional[Union[eventarc.ListChannelsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListChannelsAsyncPager: + async def list_channels( + self, + request: Optional[Union[eventarc.ListChannelsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListChannelsAsyncPager: r"""List channels. .. code-block:: python @@ -1125,10 +1199,14 @@ async def sample_list_channels(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1142,14 +1220,14 @@ async def sample_list_channels(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_channels] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_channels + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1177,16 +1255,17 @@ async def sample_list_channels(): # Done; return the response. 
return response - async def create_channel(self, - request: Optional[Union[eventarc.CreateChannelRequest, dict]] = None, - *, - parent: Optional[str] = None, - channel: Optional[gce_channel.Channel] = None, - channel_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def create_channel( + self, + request: Optional[Union[eventarc.CreateChannelRequest, dict]] = None, + *, + parent: Optional[str] = None, + channel: Optional[gce_channel.Channel] = None, + channel_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Create a new channel in a particular project and location. @@ -1274,10 +1353,14 @@ async def sample_create_channel(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent, channel, channel_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1295,14 +1378,14 @@ async def sample_create_channel(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.create_channel_] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_channel_ + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1327,15 +1410,16 @@ async def sample_create_channel(): # Done; return the response. return response - async def update_channel(self, - request: Optional[Union[eventarc.UpdateChannelRequest, dict]] = None, - *, - channel: Optional[gce_channel.Channel] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def update_channel( + self, + request: Optional[Union[eventarc.UpdateChannelRequest, dict]] = None, + *, + channel: Optional[gce_channel.Channel] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Update a single channel. .. code-block:: python @@ -1410,10 +1494,14 @@ async def sample_update_channel(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [channel, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1429,14 +1517,16 @@ async def sample_update_channel(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_channel] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_channel + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("channel.name", request.channel.name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("channel.name", request.channel.name),) + ), ) # Validate the universe domain. @@ -1461,14 +1551,15 @@ async def sample_update_channel(): # Done; return the response. 
return response - async def delete_channel(self, - request: Optional[Union[eventarc.DeleteChannelRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def delete_channel( + self, + request: Optional[Union[eventarc.DeleteChannelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Delete a single channel. .. code-block:: python @@ -1537,10 +1628,14 @@ async def sample_delete_channel(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1554,14 +1649,14 @@ async def sample_delete_channel(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.delete_channel] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_channel + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1586,14 +1681,15 @@ async def sample_delete_channel(): # Done; return the response. return response - async def get_provider(self, - request: Optional[Union[eventarc.GetProviderRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> discovery.Provider: + async def get_provider( + self, + request: Optional[Union[eventarc.GetProviderRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> discovery.Provider: r"""Get a single Provider. .. code-block:: python @@ -1651,10 +1747,14 @@ async def sample_get_provider(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1668,14 +1768,14 @@ async def sample_get_provider(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_provider] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_provider + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1692,14 +1792,15 @@ async def sample_get_provider(): # Done; return the response. return response - async def list_providers(self, - request: Optional[Union[eventarc.ListProvidersRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListProvidersAsyncPager: + async def list_providers( + self, + request: Optional[Union[eventarc.ListProvidersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListProvidersAsyncPager: r"""List providers. .. code-block:: python @@ -1760,10 +1861,14 @@ async def sample_list_providers(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1777,14 +1882,14 @@ async def sample_list_providers(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_providers] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_providers + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1812,14 +1917,15 @@ async def sample_list_providers(): # Done; return the response. 
return response - async def get_channel_connection(self, - request: Optional[Union[eventarc.GetChannelConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> channel_connection.ChannelConnection: + async def get_channel_connection( + self, + request: Optional[Union[eventarc.GetChannelConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> channel_connection.ChannelConnection: r"""Get a single ChannelConnection. .. code-block:: python @@ -1882,10 +1988,14 @@ async def sample_get_channel_connection(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1899,14 +2009,14 @@ async def sample_get_channel_connection(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.get_channel_connection] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_channel_connection + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1923,14 +2033,15 @@ async def sample_get_channel_connection(): # Done; return the response. return response - async def list_channel_connections(self, - request: Optional[Union[eventarc.ListChannelConnectionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListChannelConnectionsAsyncPager: + async def list_channel_connections( + self, + request: Optional[Union[eventarc.ListChannelConnectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListChannelConnectionsAsyncPager: r"""List channel connections. .. code-block:: python @@ -1992,10 +2103,14 @@ async def sample_list_channel_connections(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2009,14 +2124,14 @@ async def sample_list_channel_connections(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_channel_connections] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_channel_connections + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2044,16 +2159,17 @@ async def sample_list_channel_connections(): # Done; return the response. 
return response - async def create_channel_connection(self, - request: Optional[Union[eventarc.CreateChannelConnectionRequest, dict]] = None, - *, - parent: Optional[str] = None, - channel_connection: Optional[gce_channel_connection.ChannelConnection] = None, - channel_connection_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def create_channel_connection( + self, + request: Optional[Union[eventarc.CreateChannelConnectionRequest, dict]] = None, + *, + parent: Optional[str] = None, + channel_connection: Optional[gce_channel_connection.ChannelConnection] = None, + channel_connection_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Create a new ChannelConnection in a particular project and location. @@ -2141,10 +2257,14 @@ async def sample_create_channel_connection(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent, channel_connection, channel_connection_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -2162,14 +2282,14 @@ async def sample_create_channel_connection(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_channel_connection] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_channel_connection + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2194,14 +2314,15 @@ async def sample_create_channel_connection(): # Done; return the response. return response - async def delete_channel_connection(self, - request: Optional[Union[eventarc.DeleteChannelConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def delete_channel_connection( + self, + request: Optional[Union[eventarc.DeleteChannelConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Delete a single ChannelConnection. .. code-block:: python @@ -2268,10 +2389,14 @@ async def sample_delete_channel_connection(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2285,14 +2410,14 @@ async def sample_delete_channel_connection(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_channel_connection] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_channel_connection + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2317,14 +2442,15 @@ async def sample_delete_channel_connection(): # Done; return the response. 
return response - async def get_google_channel_config(self, - request: Optional[Union[eventarc.GetGoogleChannelConfigRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> google_channel_config.GoogleChannelConfig: + async def get_google_channel_config( + self, + request: Optional[Union[eventarc.GetGoogleChannelConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> google_channel_config.GoogleChannelConfig: r"""Get a GoogleChannelConfig .. code-block:: python @@ -2388,10 +2514,14 @@ async def sample_get_google_channel_config(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2405,14 +2535,14 @@ async def sample_get_google_channel_config(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.get_google_channel_config] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_google_channel_config + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2429,15 +2559,20 @@ async def sample_get_google_channel_config(): # Done; return the response. return response - async def update_google_channel_config(self, - request: Optional[Union[eventarc.UpdateGoogleChannelConfigRequest, dict]] = None, - *, - google_channel_config: Optional[gce_google_channel_config.GoogleChannelConfig] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gce_google_channel_config.GoogleChannelConfig: + async def update_google_channel_config( + self, + request: Optional[ + Union[eventarc.UpdateGoogleChannelConfigRequest, dict] + ] = None, + *, + google_channel_config: Optional[ + gce_google_channel_config.GoogleChannelConfig + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gce_google_channel_config.GoogleChannelConfig: r"""Update a single GoogleChannelConfig .. code-block:: python @@ -2511,10 +2646,14 @@ async def sample_update_google_channel_config(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [google_channel_config, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2530,14 +2669,16 @@ async def sample_update_google_channel_config(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_google_channel_config] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_google_channel_config + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("google_channel_config.name", request.google_channel_config.name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("google_channel_config.name", request.google_channel_config.name),) + ), ) # Validate the universe domain. @@ -2596,8 +2737,7 @@ async def list_operations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -2605,7 +2745,11 @@ async def list_operations( # Send the request. 
response = await rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2652,8 +2796,7 @@ async def get_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -2661,7 +2804,11 @@ async def get_operation( # Send the request. response = await rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2712,15 +2859,19 @@ async def delete_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - await rpc(request_pb, retry=retry, timeout=timeout, metadata=metadata,) + await rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) async def cancel_operation( self, @@ -2767,15 +2918,19 @@ async def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. 
- await rpc(request_pb, retry=retry, timeout=timeout, metadata=metadata,) + await rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) async def set_iam_policy( self, @@ -2886,7 +3041,8 @@ async def set_iam_policy( # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata( - (("resource", request_pb.resource),)), + (("resource", request_pb.resource),) + ), ) # Validate the universe domain. @@ -2894,7 +3050,11 @@ async def set_iam_policy( # Send the request. response = await rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3009,7 +3169,8 @@ async def get_iam_policy( # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata( - (("resource", request_pb.resource),)), + (("resource", request_pb.resource),) + ), ) # Validate the universe domain. @@ -3017,7 +3178,11 @@ async def get_iam_policy( # Send the request. response = await rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3064,13 +3229,16 @@ async def test_iam_permissions( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.test_iam_permissions] + rpc = self.transport._wrapped_methods[ + self._client._transport.test_iam_permissions + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata( - (("resource", request_pb.resource),)), + (("resource", request_pb.resource),) + ), ) # Validate the universe domain. @@ -3078,7 +3246,11 @@ async def test_iam_permissions( # Send the request. 
response = await rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3125,8 +3297,7 @@ async def get_location( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -3134,7 +3305,11 @@ async def get_location( # Send the request. response = await rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3181,8 +3356,7 @@ async def list_locations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -3190,7 +3364,11 @@ async def list_locations( # Send the request. response = await rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -3201,12 +3379,13 @@ async def __aenter__(self) -> "EventarcAsyncClient": async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "EventarcAsyncClient", -) +__all__ = ("EventarcAsyncClient",) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index 0e79129eaab4..e7023534c56a 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -13,27 +13,38 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from collections import OrderedDict -from http import HTTPStatus import json import logging as std_logging import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast import warnings +from collections import OrderedDict +from http import HTTPStatus +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) -from google.cloud.eventarc_v1 import gapic_version as package_version - +import google.protobuf from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.eventarc_v1 import gapic_version as package_version +from google.oauth2 import service_account # type: ignore try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -42,32 +53,40 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) -from google.cloud.eventarc_v1.services.eventarc import pagers -from google.cloud.eventarc_v1.types import channel -from 
google.cloud.eventarc_v1.types import channel as gce_channel -from google.cloud.eventarc_v1.types import channel_connection -from google.cloud.eventarc_v1.types import channel_connection as gce_channel_connection -from google.cloud.eventarc_v1.types import discovery -from google.cloud.eventarc_v1.types import eventarc -from google.cloud.eventarc_v1.types import google_channel_config -from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config -from google.cloud.eventarc_v1.types import trigger -from google.cloud.eventarc_v1.types import trigger as gce_trigger -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore import google.api_core.operation as operation # type: ignore import google.api_core.operation_async as operation_async # type: ignore import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore -from .transports.base import EventarcTransport, DEFAULT_CLIENT_INFO +from google.cloud.eventarc_v1.services.eventarc import pagers +from google.cloud.eventarc_v1.types import ( + channel, + channel_connection, + discovery, + eventarc, + google_channel_config, + trigger, +) +from google.cloud.eventarc_v1.types import channel as gce_channel +from google.cloud.eventarc_v1.types import channel_connection as gce_channel_connection +from google.cloud.eventarc_v1.types import ( + google_channel_config as gce_google_channel_config, +) +from google.cloud.eventarc_v1.types import trigger as gce_trigger +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import ( + iam_policy_pb2, # type: ignore + policy_pb2, # type: ignore +) +from google.longrunning import operations_pb2 # type: ignore + +from .transports.base import DEFAULT_CLIENT_INFO, 
EventarcTransport from .transports.grpc import EventarcGrpcTransport from .transports.grpc_asyncio import EventarcGrpcAsyncIOTransport from .transports.rest import EventarcRestTransport @@ -80,14 +99,16 @@ class EventarcClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. """ + _transport_registry = OrderedDict() # type: Dict[str, Type[EventarcTransport]] _transport_registry["grpc"] = EventarcGrpcTransport _transport_registry["grpc_asyncio"] = EventarcGrpcAsyncIOTransport _transport_registry["rest"] = EventarcRestTransport - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[EventarcTransport]: + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[EventarcTransport]: """Returns an appropriate transport class. Args: @@ -166,14 +187,16 @@ def _use_client_cert_effective(): bool: whether client certificate should be used for mTLS Raises: ValueError: (If using a version of google-auth without should_use_client_cert and - GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) """ # check if google-auth version supports should_use_client_cert for automatic mTLS enablement if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER return mtls.should_use_client_cert() - else: # pragma: NO COVER + else: # pragma: NO COVER # if unsupported, fallback to reading from env var - use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert_str = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() if use_client_cert_str not in ("true", "false"): raise ValueError( "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" @@ -212,8 +235,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: EventarcClient: The constructed client. 
""" - credentials = service_account.Credentials.from_service_account_file( - filename) + credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials return cls(*args, **kwargs) @@ -230,69 +252,135 @@ def transport(self) -> EventarcTransport: return self._transport @staticmethod - def channel_path(project: str,location: str,channel: str,) -> str: + def channel_path( + project: str, + location: str, + channel: str, + ) -> str: """Returns a fully-qualified channel string.""" - return "projects/{project}/locations/{location}/channels/{channel}".format(project=project, location=location, channel=channel, ) + return "projects/{project}/locations/{location}/channels/{channel}".format( + project=project, + location=location, + channel=channel, + ) @staticmethod - def parse_channel_path(path: str) -> Dict[str,str]: + def parse_channel_path(path: str) -> Dict[str, str]: """Parses a channel path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/channels/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/channels/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def channel_connection_path(project: str,location: str,channel_connection: str,) -> str: + def channel_connection_path( + project: str, + location: str, + channel_connection: str, + ) -> str: """Returns a fully-qualified channel_connection string.""" - return "projects/{project}/locations/{location}/channelConnections/{channel_connection}".format(project=project, location=location, channel_connection=channel_connection, ) + return "projects/{project}/locations/{location}/channelConnections/{channel_connection}".format( + project=project, + location=location, + channel_connection=channel_connection, + ) @staticmethod - def parse_channel_connection_path(path: str) -> Dict[str,str]: + def parse_channel_connection_path(path: str) -> Dict[str, str]: """Parses a channel_connection path into 
its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/channelConnections/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/channelConnections/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def cloud_function_path(project: str,location: str,function: str,) -> str: + def cloud_function_path( + project: str, + location: str, + function: str, + ) -> str: """Returns a fully-qualified cloud_function string.""" - return "projects/{project}/locations/{location}/functions/{function}".format(project=project, location=location, function=function, ) + return "projects/{project}/locations/{location}/functions/{function}".format( + project=project, + location=location, + function=function, + ) @staticmethod - def parse_cloud_function_path(path: str) -> Dict[str,str]: + def parse_cloud_function_path(path: str) -> Dict[str, str]: """Parses a cloud_function path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/functions/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/functions/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def crypto_key_path(project: str,location: str,key_ring: str,crypto_key: str,) -> str: + def crypto_key_path( + project: str, + location: str, + key_ring: str, + crypto_key: str, + ) -> str: """Returns a fully-qualified crypto_key string.""" - return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, ) + return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) @staticmethod - def parse_crypto_key_path(path: str) -> Dict[str,str]: + def parse_crypto_key_path(path: str) -> Dict[str, str]: """Parses a crypto_key path into its component segments.""" - m = 
re.match(r"^projects/(?P.+?)/locations/(?P.+?)/keyRings/(?P.+?)/cryptoKeys/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/keyRings/(?P.+?)/cryptoKeys/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def google_channel_config_path(project: str,location: str,) -> str: + def google_channel_config_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified google_channel_config string.""" - return "projects/{project}/locations/{location}/googleChannelConfig".format(project=project, location=location, ) + return "projects/{project}/locations/{location}/googleChannelConfig".format( + project=project, + location=location, + ) @staticmethod - def parse_google_channel_config_path(path: str) -> Dict[str,str]: + def parse_google_channel_config_path(path: str) -> Dict[str, str]: """Parses a google_channel_config path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/googleChannelConfig$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/googleChannelConfig$", + path, + ) return m.groupdict() if m else {} @staticmethod - def provider_path(project: str,location: str,provider: str,) -> str: + def provider_path( + project: str, + location: str, + provider: str, + ) -> str: """Returns a fully-qualified provider string.""" - return "projects/{project}/locations/{location}/providers/{provider}".format(project=project, location=location, provider=provider, ) + return "projects/{project}/locations/{location}/providers/{provider}".format( + project=project, + location=location, + provider=provider, + ) @staticmethod - def parse_provider_path(path: str) -> Dict[str,str]: + def parse_provider_path(path: str) -> Dict[str, str]: """Parses a provider path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/providers/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/providers/(?P.+?)$", + path, + ) return m.groupdict() if 
m else {} @staticmethod @@ -301,101 +389,156 @@ def service_path() -> str: return "*".format() @staticmethod - def parse_service_path(path: str) -> Dict[str,str]: + def parse_service_path(path: str) -> Dict[str, str]: """Parses a service path into its component segments.""" m = re.match(r"^.*$", path) return m.groupdict() if m else {} @staticmethod - def service_account_path(project: str,service_account: str,) -> str: + def service_account_path( + project: str, + service_account: str, + ) -> str: """Returns a fully-qualified service_account string.""" - return "projects/{project}/serviceAccounts/{service_account}".format(project=project, service_account=service_account, ) + return "projects/{project}/serviceAccounts/{service_account}".format( + project=project, + service_account=service_account, + ) @staticmethod - def parse_service_account_path(path: str) -> Dict[str,str]: + def parse_service_account_path(path: str) -> Dict[str, str]: """Parses a service_account path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/serviceAccounts/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/serviceAccounts/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def trigger_path(project: str,location: str,trigger: str,) -> str: + def trigger_path( + project: str, + location: str, + trigger: str, + ) -> str: """Returns a fully-qualified trigger string.""" - return "projects/{project}/locations/{location}/triggers/{trigger}".format(project=project, location=location, trigger=trigger, ) + return "projects/{project}/locations/{location}/triggers/{trigger}".format( + project=project, + location=location, + trigger=trigger, + ) @staticmethod - def parse_trigger_path(path: str) -> Dict[str,str]: + def parse_trigger_path(path: str) -> Dict[str, str]: """Parses a trigger path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/triggers/(?P.+?)$", path) + m = re.match( + 
r"^projects/(?P.+?)/locations/(?P.+?)/triggers/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def workflow_path(project: str,location: str,workflow: str,) -> str: + def workflow_path( + project: str, + location: str, + workflow: str, + ) -> str: """Returns a fully-qualified workflow string.""" - return "projects/{project}/locations/{location}/workflows/{workflow}".format(project=project, location=location, workflow=workflow, ) + return "projects/{project}/locations/{location}/workflows/{workflow}".format( + project=project, + location=location, + workflow=workflow, + ) @staticmethod - def parse_workflow_path(path: str) -> Dict[str,str]: + def parse_workflow_path(path: str) -> Dict[str, str]: """Parses a workflow path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/workflows/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/workflows/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: + def parse_common_billing_account_path(path: str) -> Dict[str, str]: """Parse a billing_account path into its component segments.""" m = re.match(r"^billingAccounts/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str, ) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod - def parse_common_folder_path(path: 
str) -> Dict[str,str]: + def parse_common_folder_path(path: str) -> Dict[str, str]: """Parse a folder path into its component segments.""" m = re.match(r"^folders/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str, ) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: + def parse_common_organization_path(path: str) -> Dict[str, str]: """Parse a organization path into its component segments.""" m = re.match(r"^organizations/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str, ) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) + return "projects/{project}".format( + project=project, + ) @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: + def parse_common_project_path(path: str) -> Dict[str, str]: """Parse a project path into its component segments.""" m = re.match(r"^projects/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str, ) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: + def parse_common_location_path(path: str) -> Dict[str, str]: """Parse a location path into its component 
segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -427,14 +570,18 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = EventarcClient._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Figure out the client cert source to use. client_cert_source = None @@ -447,7 +594,9 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio # Figure out which api endpoint to use. 
if client_options.api_endpoint is not None: api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): api_endpoint = cls.DEFAULT_MTLS_ENDPOINT else: api_endpoint = cls.DEFAULT_ENDPOINT @@ -472,7 +621,9 @@ def _read_environment_variables(): use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) return use_client_cert, use_mtls_endpoint, universe_domain_env @staticmethod @@ -495,7 +646,9 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): return client_cert_source @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint) -> str: + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ) -> str: """Return the API endpoint used by the client. Args: @@ -511,17 +664,25 @@ def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtl """ if api_override is not None: api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): _default_universe = EventarcClient._DEFAULT_UNIVERSE if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." 
+ ) api_endpoint = EventarcClient.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = EventarcClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + api_endpoint = EventarcClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) return api_endpoint @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: """Return the universe domain used by the client. Args: @@ -557,15 +718,18 @@ def _validate_universe_domain(self): return True def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError + self, error: core_exceptions.GoogleAPICallError ) -> None: """Adds credential info string to error details for 401/403/404 errors. Args: error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: return cred = self._transport._credentials @@ -598,12 +762,16 @@ def universe_domain(self) -> str: """ return self._universe_domain - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, EventarcTransport, Callable[..., EventarcTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, EventarcTransport, Callable[..., EventarcTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> 
None: """Instantiates the eventarc client. Args: @@ -661,13 +829,21 @@ def __init__(self, *, self._client_options = client_options_lib.from_dict(self._client_options) if self._client_options is None: self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = EventarcClient._read_environment_variables() - self._client_cert_source = EventarcClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = EventarcClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ( + EventarcClient._read_environment_variables() + ) + self._client_cert_source = EventarcClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = EventarcClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) self._api_endpoint: str = "" # updated below, depending on `transport` # Initialize the universe domain validation. @@ -679,7 +855,9 @@ def __init__(self, *, api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport @@ -688,30 +866,37 @@ def __init__(self, *, if transport_provided: # transport is a EventarcTransport instance. 
if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) if self._client_options.scopes: raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." + "When providing a transport instance, provide its scopes directly." ) self._transport = cast(EventarcTransport, transport) self._api_endpoint = self._transport.host - self._api_endpoint = (self._api_endpoint or - EventarcClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) + self._api_endpoint = self._api_endpoint or EventarcClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) if not transport_provided: import google.auth._default # type: ignore - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) - transport_init: Union[Type[EventarcTransport], Callable[..., EventarcTransport]] = ( + transport_init: Union[ + Type[EventarcTransport], Callable[..., EventarcTransport] + ] = ( EventarcClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., EventarcTransport], transport) @@ -730,28 +915,37 @@ def __init__(self, *, ) if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER _LOGGER.debug( "Created 
client `google.cloud.eventarc_v1.EventarcClient`.", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { "serviceName": "google.cloud.eventarc.v1.Eventarc", "credentialsType": None, - } + }, ) - def get_trigger(self, - request: Optional[Union[eventarc.GetTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> trigger.Trigger: + def get_trigger( + self, + request: Optional[Union[eventarc.GetTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> trigger.Trigger: r"""Get a single trigger. .. code-block:: python @@ -809,10 +1003,14 @@ def sample_get_trigger(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -830,9 +1028,7 @@ def sample_get_trigger(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -849,14 +1045,15 @@ def sample_get_trigger(): # Done; return the response. return response - def list_triggers(self, - request: Optional[Union[eventarc.ListTriggersRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListTriggersPager: + def list_triggers( + self, + request: Optional[Union[eventarc.ListTriggersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListTriggersPager: r"""List triggers. .. code-block:: python @@ -917,10 +1114,14 @@ def sample_list_triggers(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -938,9 +1139,7 @@ def sample_list_triggers(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -968,16 +1167,17 @@ def sample_list_triggers(): # Done; return the response. return response - def create_trigger(self, - request: Optional[Union[eventarc.CreateTriggerRequest, dict]] = None, - *, - parent: Optional[str] = None, - trigger: Optional[gce_trigger.Trigger] = None, - trigger_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def create_trigger( + self, + request: Optional[Union[eventarc.CreateTriggerRequest, dict]] = None, + *, + parent: Optional[str] = None, + trigger: Optional[gce_trigger.Trigger] = None, + trigger_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Create a new trigger in a particular project and location. 
@@ -1065,10 +1265,14 @@ def sample_create_trigger(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent, trigger, trigger_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1090,9 +1294,7 @@ def sample_create_trigger(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1117,16 +1319,17 @@ def sample_create_trigger(): # Done; return the response. 
return response - def update_trigger(self, - request: Optional[Union[eventarc.UpdateTriggerRequest, dict]] = None, - *, - trigger: Optional[gce_trigger.Trigger] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - allow_missing: Optional[bool] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def update_trigger( + self, + request: Optional[Union[eventarc.UpdateTriggerRequest, dict]] = None, + *, + trigger: Optional[gce_trigger.Trigger] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + allow_missing: Optional[bool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Update a single trigger. .. code-block:: python @@ -1206,10 +1409,14 @@ def sample_update_trigger(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [trigger, update_mask, allow_missing] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1231,9 +1438,9 @@ def sample_update_trigger(): # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("trigger.name", request.trigger.name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("trigger.name", request.trigger.name),) + ), ) # Validate the universe domain. @@ -1258,15 +1465,16 @@ def sample_update_trigger(): # Done; return the response. return response - def delete_trigger(self, - request: Optional[Union[eventarc.DeleteTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - allow_missing: Optional[bool] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def delete_trigger( + self, + request: Optional[Union[eventarc.DeleteTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + allow_missing: Optional[bool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Delete a single trigger. .. code-block:: python @@ -1340,10 +1548,14 @@ def sample_delete_trigger(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name, allow_missing] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -1363,9 +1575,7 @@ def sample_delete_trigger(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1390,14 +1600,15 @@ def sample_delete_trigger(): # Done; return the response. return response - def get_channel(self, - request: Optional[Union[eventarc.GetChannelRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> channel.Channel: + def get_channel( + self, + request: Optional[Union[eventarc.GetChannelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> channel.Channel: r"""Get a single Channel. .. code-block:: python @@ -1461,10 +1672,14 @@ def sample_get_channel(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -1482,9 +1697,7 @@ def sample_get_channel(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1501,14 +1714,15 @@ def sample_get_channel(): # Done; return the response. return response - def list_channels(self, - request: Optional[Union[eventarc.ListChannelsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListChannelsPager: + def list_channels( + self, + request: Optional[Union[eventarc.ListChannelsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListChannelsPager: r"""List channels. .. code-block:: python @@ -1569,10 +1783,14 @@ def sample_list_channels(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -1590,9 +1808,7 @@ def sample_list_channels(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1620,16 +1836,17 @@ def sample_list_channels(): # Done; return the response. return response - def create_channel(self, - request: Optional[Union[eventarc.CreateChannelRequest, dict]] = None, - *, - parent: Optional[str] = None, - channel: Optional[gce_channel.Channel] = None, - channel_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def create_channel( + self, + request: Optional[Union[eventarc.CreateChannelRequest, dict]] = None, + *, + parent: Optional[str] = None, + channel: Optional[gce_channel.Channel] = None, + channel_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Create a new channel in a particular project and location. @@ -1717,10 +1934,14 @@ def sample_create_channel(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [parent, channel, channel_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1742,9 +1963,7 @@ def sample_create_channel(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1769,15 +1988,16 @@ def sample_create_channel(): # Done; return the response. return response - def update_channel(self, - request: Optional[Union[eventarc.UpdateChannelRequest, dict]] = None, - *, - channel: Optional[gce_channel.Channel] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def update_channel( + self, + request: Optional[Union[eventarc.UpdateChannelRequest, dict]] = None, + *, + channel: Optional[gce_channel.Channel] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Update a single channel. .. 
code-block:: python @@ -1852,10 +2072,14 @@ def sample_update_channel(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [channel, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1875,9 +2099,9 @@ def sample_update_channel(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("channel.name", request.channel.name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("channel.name", request.channel.name),) + ), ) # Validate the universe domain. @@ -1902,14 +2126,15 @@ def sample_update_channel(): # Done; return the response. 
return response - def delete_channel(self, - request: Optional[Union[eventarc.DeleteChannelRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def delete_channel( + self, + request: Optional[Union[eventarc.DeleteChannelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Delete a single channel. .. code-block:: python @@ -1978,10 +2203,14 @@ def sample_delete_channel(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1999,9 +2228,7 @@ def sample_delete_channel(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2026,14 +2253,15 @@ def sample_delete_channel(): # Done; return the response. 
return response - def get_provider(self, - request: Optional[Union[eventarc.GetProviderRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> discovery.Provider: + def get_provider( + self, + request: Optional[Union[eventarc.GetProviderRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> discovery.Provider: r"""Get a single Provider. .. code-block:: python @@ -2091,10 +2319,14 @@ def sample_get_provider(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2112,9 +2344,7 @@ def sample_get_provider(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2131,14 +2361,15 @@ def sample_get_provider(): # Done; return the response. 
return response - def list_providers(self, - request: Optional[Union[eventarc.ListProvidersRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListProvidersPager: + def list_providers( + self, + request: Optional[Union[eventarc.ListProvidersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListProvidersPager: r"""List providers. .. code-block:: python @@ -2199,10 +2430,14 @@ def sample_list_providers(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2220,9 +2455,7 @@ def sample_list_providers(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2250,14 +2483,15 @@ def sample_list_providers(): # Done; return the response. 
return response - def get_channel_connection(self, - request: Optional[Union[eventarc.GetChannelConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> channel_connection.ChannelConnection: + def get_channel_connection( + self, + request: Optional[Union[eventarc.GetChannelConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> channel_connection.ChannelConnection: r"""Get a single ChannelConnection. .. code-block:: python @@ -2320,10 +2554,14 @@ def sample_get_channel_connection(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2341,9 +2579,7 @@ def sample_get_channel_connection(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. 
@@ -2360,14 +2596,15 @@ def sample_get_channel_connection(): # Done; return the response. return response - def list_channel_connections(self, - request: Optional[Union[eventarc.ListChannelConnectionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListChannelConnectionsPager: + def list_channel_connections( + self, + request: Optional[Union[eventarc.ListChannelConnectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListChannelConnectionsPager: r"""List channel connections. .. code-block:: python @@ -2429,10 +2666,14 @@ def sample_list_channel_connections(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2450,9 +2691,7 @@ def sample_list_channel_connections(): # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2480,16 +2719,17 @@ def sample_list_channel_connections(): # Done; return the response. return response - def create_channel_connection(self, - request: Optional[Union[eventarc.CreateChannelConnectionRequest, dict]] = None, - *, - parent: Optional[str] = None, - channel_connection: Optional[gce_channel_connection.ChannelConnection] = None, - channel_connection_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def create_channel_connection( + self, + request: Optional[Union[eventarc.CreateChannelConnectionRequest, dict]] = None, + *, + parent: Optional[str] = None, + channel_connection: Optional[gce_channel_connection.ChannelConnection] = None, + channel_connection_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Create a new ChannelConnection in a particular project and location. @@ -2577,10 +2817,14 @@ def sample_create_channel_connection(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [parent, channel_connection, channel_connection_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2597,14 +2841,14 @@ def sample_create_channel_connection(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_channel_connection] + rpc = self._transport._wrapped_methods[ + self._transport.create_channel_connection + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2629,14 +2873,15 @@ def sample_create_channel_connection(): # Done; return the response. 
return response - def delete_channel_connection(self, - request: Optional[Union[eventarc.DeleteChannelConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def delete_channel_connection( + self, + request: Optional[Union[eventarc.DeleteChannelConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Delete a single ChannelConnection. .. code-block:: python @@ -2703,10 +2948,14 @@ def sample_delete_channel_connection(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2719,14 +2968,14 @@ def sample_delete_channel_connection(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.delete_channel_connection] + rpc = self._transport._wrapped_methods[ + self._transport.delete_channel_connection + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2751,14 +3000,15 @@ def sample_delete_channel_connection(): # Done; return the response. return response - def get_google_channel_config(self, - request: Optional[Union[eventarc.GetGoogleChannelConfigRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> google_channel_config.GoogleChannelConfig: + def get_google_channel_config( + self, + request: Optional[Union[eventarc.GetGoogleChannelConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> google_channel_config.GoogleChannelConfig: r"""Get a GoogleChannelConfig .. code-block:: python @@ -2822,10 +3072,14 @@ def sample_get_google_channel_config(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2838,14 +3092,14 @@ def sample_get_google_channel_config(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_google_channel_config] + rpc = self._transport._wrapped_methods[ + self._transport.get_google_channel_config + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2862,15 +3116,20 @@ def sample_get_google_channel_config(): # Done; return the response. 
return response - def update_google_channel_config(self, - request: Optional[Union[eventarc.UpdateGoogleChannelConfigRequest, dict]] = None, - *, - google_channel_config: Optional[gce_google_channel_config.GoogleChannelConfig] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gce_google_channel_config.GoogleChannelConfig: + def update_google_channel_config( + self, + request: Optional[ + Union[eventarc.UpdateGoogleChannelConfigRequest, dict] + ] = None, + *, + google_channel_config: Optional[ + gce_google_channel_config.GoogleChannelConfig + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gce_google_channel_config.GoogleChannelConfig: r"""Update a single GoogleChannelConfig .. code-block:: python @@ -2944,10 +3203,14 @@ def sample_update_google_channel_config(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [google_channel_config, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -2962,14 +3225,16 @@ def sample_update_google_channel_config(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_google_channel_config] + rpc = self._transport._wrapped_methods[ + self._transport.update_google_channel_config + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("google_channel_config.name", request.google_channel_config.name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("google_channel_config.name", request.google_channel_config.name),) + ), ) # Validate the universe domain. @@ -3041,8 +3306,7 @@ def list_operations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -3051,7 +3315,11 @@ def list_operations( try: # Send the request. response = rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3101,8 +3369,7 @@ def get_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -3111,7 +3378,11 @@ def get_operation( try: # Send the request. response = rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -3165,15 +3436,19 @@ def delete_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc(request_pb, retry=retry, timeout=timeout, metadata=metadata,) + rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) def cancel_operation( self, @@ -3220,15 +3495,19 @@ def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc(request_pb, retry=retry, timeout=timeout, metadata=metadata,) + rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) def set_iam_policy( self, @@ -3339,7 +3618,8 @@ def set_iam_policy( # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata( - (("resource", request_pb.resource),)), + (("resource", request_pb.resource),) + ), ) # Validate the universe domain. @@ -3348,7 +3628,11 @@ def set_iam_policy( try: # Send the request. response = rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3466,7 +3750,8 @@ def get_iam_policy( # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata( - (("resource", request_pb.resource),)), + (("resource", request_pb.resource),) + ), ) # Validate the universe domain. @@ -3475,7 +3760,11 @@ def get_iam_policy( try: # Send the request. 
response = rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3531,7 +3820,8 @@ def test_iam_permissions( # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata( - (("resource", request_pb.resource),)), + (("resource", request_pb.resource),) + ), ) # Validate the universe domain. @@ -3540,7 +3830,11 @@ def test_iam_permissions( try: # Send the request. response = rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3590,8 +3884,7 @@ def get_location( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -3600,7 +3893,11 @@ def get_location( try: # Send the request. response = rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3650,8 +3947,7 @@ def list_locations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -3660,7 +3956,11 @@ def list_locations( try: # Send the request. response = rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -3669,11 +3969,11 @@ def list_locations( raise e -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "EventarcClient", -) +__all__ = ("EventarcClient",) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py index 0b116f59f3fa..9929b1307129 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py @@ -13,22 +13,38 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + from google.api_core import gapic_v1 from google.api_core import retry as retries from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.cloud.eventarc_v1.types import channel -from google.cloud.eventarc_v1.types import channel_connection -from google.cloud.eventarc_v1.types import discovery -from google.cloud.eventarc_v1.types import eventarc -from google.cloud.eventarc_v1.types import trigger +from google.cloud.eventarc_v1.types import ( + channel, + channel_connection, + discovery, + eventarc, + trigger, +) class ListTriggersPager: @@ -48,14 +64,17 @@ class ListTriggersPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., eventarc.ListTriggersResponse], - request: eventarc.ListTriggersRequest, - response: eventarc.ListTriggersResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., eventarc.ListTriggersResponse], + request: eventarc.ListTriggersRequest, + response: eventarc.ListTriggersResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. Args: @@ -88,7 +107,12 @@ def pages(self) -> Iterator[eventarc.ListTriggersResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[trigger.Trigger]: @@ -96,7 +120,7 @@ def __iter__(self) -> Iterator[trigger.Trigger]: yield from page.triggers def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListTriggersAsyncPager: @@ -116,14 +140,17 @@ class ListTriggersAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[eventarc.ListTriggersResponse]], - request: eventarc.ListTriggersRequest, - response: eventarc.ListTriggersResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[eventarc.ListTriggersResponse]], + request: eventarc.ListTriggersRequest, + response: eventarc.ListTriggersResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. Args: @@ -156,8 +183,14 @@ async def pages(self) -> AsyncIterator[eventarc.ListTriggersResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[trigger.Trigger]: async def async_generator(): async for page in self.pages: @@ -167,7 +200,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListChannelsPager: @@ -187,14 +220,17 @@ class ListChannelsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., eventarc.ListChannelsResponse], - request: eventarc.ListChannelsRequest, - response: eventarc.ListChannelsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., eventarc.ListChannelsResponse], + request: eventarc.ListChannelsRequest, + response: eventarc.ListChannelsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. Args: @@ -227,7 +263,12 @@ def pages(self) -> Iterator[eventarc.ListChannelsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[channel.Channel]: @@ -235,7 +276,7 @@ def __iter__(self) -> Iterator[channel.Channel]: yield from page.channels def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListChannelsAsyncPager: @@ -255,14 +296,17 @@ class ListChannelsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[eventarc.ListChannelsResponse]], - request: eventarc.ListChannelsRequest, - response: eventarc.ListChannelsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[eventarc.ListChannelsResponse]], + request: eventarc.ListChannelsRequest, + response: eventarc.ListChannelsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. Args: @@ -295,8 +339,14 @@ async def pages(self) -> AsyncIterator[eventarc.ListChannelsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[channel.Channel]: async def async_generator(): async for page in self.pages: @@ -306,7 +356,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListProvidersPager: @@ -326,14 +376,17 @@ class ListProvidersPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., eventarc.ListProvidersResponse], - request: eventarc.ListProvidersRequest, - response: eventarc.ListProvidersResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., eventarc.ListProvidersResponse], + request: eventarc.ListProvidersRequest, + response: eventarc.ListProvidersResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. Args: @@ -366,7 +419,12 @@ def pages(self) -> Iterator[eventarc.ListProvidersResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[discovery.Provider]: @@ -374,7 +432,7 @@ def __iter__(self) -> Iterator[discovery.Provider]: yield from page.providers def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListProvidersAsyncPager: @@ -394,14 +452,17 @@ class ListProvidersAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[eventarc.ListProvidersResponse]], - request: eventarc.ListProvidersRequest, - response: eventarc.ListProvidersResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[eventarc.ListProvidersResponse]], + request: eventarc.ListProvidersRequest, + response: eventarc.ListProvidersResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. Args: @@ -434,8 +495,14 @@ async def pages(self) -> AsyncIterator[eventarc.ListProvidersResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[discovery.Provider]: async def async_generator(): async for page in self.pages: @@ -445,7 +512,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListChannelConnectionsPager: @@ -465,14 +532,17 @@ class ListChannelConnectionsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., eventarc.ListChannelConnectionsResponse], - request: eventarc.ListChannelConnectionsRequest, - response: eventarc.ListChannelConnectionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., eventarc.ListChannelConnectionsResponse], + request: eventarc.ListChannelConnectionsRequest, + response: eventarc.ListChannelConnectionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. Args: @@ -505,7 +575,12 @@ def pages(self) -> Iterator[eventarc.ListChannelConnectionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[channel_connection.ChannelConnection]: @@ -513,7 +588,7 @@ def __iter__(self) -> Iterator[channel_connection.ChannelConnection]: yield from page.channel_connections def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListChannelConnectionsAsyncPager: @@ -533,14 +608,17 @@ class ListChannelConnectionsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[eventarc.ListChannelConnectionsResponse]], - request: eventarc.ListChannelConnectionsRequest, - response: eventarc.ListChannelConnectionsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[eventarc.ListChannelConnectionsResponse]], + request: eventarc.ListChannelConnectionsRequest, + response: eventarc.ListChannelConnectionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. Args: @@ -573,8 +651,14 @@ async def pages(self) -> AsyncIterator[eventarc.ListChannelConnectionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[channel_connection.ChannelConnection]: async def async_generator(): async for page in self.pages: @@ -584,4 +668,4 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/__init__.py index c6c13c6a4c6f..f76a027acbe7 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/__init__.py @@ -19,20 +19,18 @@ from .base import EventarcTransport from .grpc import EventarcGrpcTransport from .grpc_asyncio import EventarcGrpcAsyncIOTransport -from .rest import EventarcRestTransport -from .rest import EventarcRestInterceptor - +from .rest import EventarcRestInterceptor, EventarcRestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[EventarcTransport]] -_transport_registry['grpc'] = EventarcGrpcTransport -_transport_registry['grpc_asyncio'] = EventarcGrpcAsyncIOTransport -_transport_registry['rest'] = EventarcRestTransport +_transport_registry["grpc"] = EventarcGrpcTransport +_transport_registry["grpc_asyncio"] = EventarcGrpcAsyncIOTransport +_transport_registry["rest"] = EventarcRestTransport __all__ = ( - 'EventarcTransport', - 'EventarcGrpcTransport', - 'EventarcGrpcAsyncIOTransport', - 'EventarcRestTransport', - 'EventarcRestInterceptor', + "EventarcTransport", + "EventarcGrpcTransport", + "EventarcGrpcAsyncIOTransport", + "EventarcRestTransport", + "EventarcRestInterceptor", ) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py index 88f000dd123f..749fab589507 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py @@ -16,31 +16,36 @@ import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -from 
google.cloud.eventarc_v1 import gapic_version as package_version - -import google.auth # type: ignore import google.api_core +import google.auth # type: ignore +import google.protobuf from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, operations_v1 from google.api_core import retry as retries -from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -from google.cloud.eventarc_v1.types import channel -from google.cloud.eventarc_v1.types import channel_connection -from google.cloud.eventarc_v1.types import discovery -from google.cloud.eventarc_v1.types import eventarc -from google.cloud.eventarc_v1.types import google_channel_config -from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config -from google.cloud.eventarc_v1.types import trigger -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore +from google.cloud.eventarc_v1 import gapic_version as package_version +from google.cloud.eventarc_v1.types import ( + channel, + channel_connection, + discovery, + eventarc, + google_channel_config, + trigger, +) +from google.cloud.eventarc_v1.types import ( + google_channel_config as gce_google_channel_config, +) +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import ( + iam_policy_pb2, # type: ignore + policy_pb2, # type: ignore +) +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + 
gapic_version=package_version.__version__ +) if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ @@ -49,24 +54,23 @@ class EventarcTransport(abc.ABC): """Abstract transport class for Eventarc.""" - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - DEFAULT_HOST: str = 'eventarc.googleapis.com' + DEFAULT_HOST: str = "eventarc.googleapis.com" def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: """Instantiate the transport. Args: @@ -105,31 +109,43 @@ def __init__( # If no credentials are provided, then determine the appropriate # defaults. 
if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) if credentials_file is not None: credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - default_scopes=self.AUTH_SCOPES, - ) + credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(scopes=scopes, quota_project_id=quota_project_id, default_scopes=self.AUTH_SCOPES) + credentials, _ = google.auth.default( + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) # Don't apply audience if the credentials file passed from user. if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): credentials = credentials.with_always_use_jwt_access(True) # Save the credentials. self._credentials = credentials # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
- if ':' not in host: - host += ':443' + if ":" not in host: + host += ":443" self._host = host self._wrapped_methods: Dict[Callable, Callable] = {} @@ -276,14 +292,14 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - } + } def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! """ raise NotImplementedError() @@ -293,165 +309,177 @@ def operations_client(self): raise NotImplementedError() @property - def get_trigger(self) -> Callable[ - [eventarc.GetTriggerRequest], - Union[ - trigger.Trigger, - Awaitable[trigger.Trigger] - ]]: + def get_trigger( + self, + ) -> Callable[ + [eventarc.GetTriggerRequest], Union[trigger.Trigger, Awaitable[trigger.Trigger]] + ]: raise NotImplementedError() @property - def list_triggers(self) -> Callable[ - [eventarc.ListTriggersRequest], - Union[ - eventarc.ListTriggersResponse, - Awaitable[eventarc.ListTriggersResponse] - ]]: + def list_triggers( + self, + ) -> Callable[ + [eventarc.ListTriggersRequest], + Union[eventarc.ListTriggersResponse, Awaitable[eventarc.ListTriggersResponse]], + ]: raise NotImplementedError() @property - def create_trigger(self) -> Callable[ - [eventarc.CreateTriggerRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def create_trigger( + self, + ) -> Callable[ + [eventarc.CreateTriggerRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def update_trigger(self) -> Callable[ - [eventarc.UpdateTriggerRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def update_trigger( + self, + ) -> Callable[ + [eventarc.UpdateTriggerRequest], + 
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def delete_trigger(self) -> Callable[ - [eventarc.DeleteTriggerRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def delete_trigger( + self, + ) -> Callable[ + [eventarc.DeleteTriggerRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def get_channel(self) -> Callable[ - [eventarc.GetChannelRequest], - Union[ - channel.Channel, - Awaitable[channel.Channel] - ]]: + def get_channel( + self, + ) -> Callable[ + [eventarc.GetChannelRequest], Union[channel.Channel, Awaitable[channel.Channel]] + ]: raise NotImplementedError() @property - def list_channels(self) -> Callable[ - [eventarc.ListChannelsRequest], - Union[ - eventarc.ListChannelsResponse, - Awaitable[eventarc.ListChannelsResponse] - ]]: + def list_channels( + self, + ) -> Callable[ + [eventarc.ListChannelsRequest], + Union[eventarc.ListChannelsResponse, Awaitable[eventarc.ListChannelsResponse]], + ]: raise NotImplementedError() @property - def create_channel_(self) -> Callable[ - [eventarc.CreateChannelRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def create_channel_( + self, + ) -> Callable[ + [eventarc.CreateChannelRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def update_channel(self) -> Callable[ - [eventarc.UpdateChannelRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def update_channel( + self, + ) -> Callable[ + [eventarc.UpdateChannelRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def delete_channel(self) -> Callable[ - [eventarc.DeleteChannelRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - 
]]: + def delete_channel( + self, + ) -> Callable[ + [eventarc.DeleteChannelRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def get_provider(self) -> Callable[ - [eventarc.GetProviderRequest], - Union[ - discovery.Provider, - Awaitable[discovery.Provider] - ]]: + def get_provider( + self, + ) -> Callable[ + [eventarc.GetProviderRequest], + Union[discovery.Provider, Awaitable[discovery.Provider]], + ]: raise NotImplementedError() @property - def list_providers(self) -> Callable[ - [eventarc.ListProvidersRequest], - Union[ - eventarc.ListProvidersResponse, - Awaitable[eventarc.ListProvidersResponse] - ]]: + def list_providers( + self, + ) -> Callable[ + [eventarc.ListProvidersRequest], + Union[ + eventarc.ListProvidersResponse, Awaitable[eventarc.ListProvidersResponse] + ], + ]: raise NotImplementedError() @property - def get_channel_connection(self) -> Callable[ - [eventarc.GetChannelConnectionRequest], - Union[ - channel_connection.ChannelConnection, - Awaitable[channel_connection.ChannelConnection] - ]]: + def get_channel_connection( + self, + ) -> Callable[ + [eventarc.GetChannelConnectionRequest], + Union[ + channel_connection.ChannelConnection, + Awaitable[channel_connection.ChannelConnection], + ], + ]: raise NotImplementedError() @property - def list_channel_connections(self) -> Callable[ - [eventarc.ListChannelConnectionsRequest], - Union[ - eventarc.ListChannelConnectionsResponse, - Awaitable[eventarc.ListChannelConnectionsResponse] - ]]: + def list_channel_connections( + self, + ) -> Callable[ + [eventarc.ListChannelConnectionsRequest], + Union[ + eventarc.ListChannelConnectionsResponse, + Awaitable[eventarc.ListChannelConnectionsResponse], + ], + ]: raise NotImplementedError() @property - def create_channel_connection(self) -> Callable[ - [eventarc.CreateChannelConnectionRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def 
create_channel_connection( + self, + ) -> Callable[ + [eventarc.CreateChannelConnectionRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def delete_channel_connection(self) -> Callable[ - [eventarc.DeleteChannelConnectionRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def delete_channel_connection( + self, + ) -> Callable[ + [eventarc.DeleteChannelConnectionRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def get_google_channel_config(self) -> Callable[ - [eventarc.GetGoogleChannelConfigRequest], - Union[ - google_channel_config.GoogleChannelConfig, - Awaitable[google_channel_config.GoogleChannelConfig] - ]]: + def get_google_channel_config( + self, + ) -> Callable[ + [eventarc.GetGoogleChannelConfigRequest], + Union[ + google_channel_config.GoogleChannelConfig, + Awaitable[google_channel_config.GoogleChannelConfig], + ], + ]: raise NotImplementedError() @property - def update_google_channel_config(self) -> Callable[ - [eventarc.UpdateGoogleChannelConfigRequest], - Union[ - gce_google_channel_config.GoogleChannelConfig, - Awaitable[gce_google_channel_config.GoogleChannelConfig] - ]]: + def update_google_channel_config( + self, + ) -> Callable[ + [eventarc.UpdateGoogleChannelConfigRequest], + Union[ + gce_google_channel_config.GoogleChannelConfig, + Awaitable[gce_google_channel_config.GoogleChannelConfig], + ], + ]: raise NotImplementedError() @property @@ -459,7 +487,10 @@ def list_operations( self, ) -> Callable[ [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], ]: raise NotImplementedError() @@ -521,7 +552,8 @@ def test_iam_permissions( raise NotImplementedError() @property - 
def get_location(self, + def get_location( + self, ) -> Callable[ [locations_pb2.GetLocationRequest], Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], @@ -529,10 +561,14 @@ def get_location(self, raise NotImplementedError() @property - def list_locations(self, + def list_locations( + self, ) -> Callable[ [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], ]: raise NotImplementedError() @@ -541,6 +577,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ( - 'EventarcTransport', -) +__all__ = ("EventarcTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py index 4a70c191eaa5..6bfb857d7f70 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py @@ -19,33 +19,37 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson +import google.auth # type: ignore import google.protobuf.message - import grpc # type: ignore import proto # type: ignore +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +from google.auth import credentials as ga_credentials # type: 
ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.eventarc_v1.types import ( + channel, + channel_connection, + discovery, + eventarc, + google_channel_config, + trigger, +) +from google.cloud.eventarc_v1.types import ( + google_channel_config as gce_google_channel_config, +) +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import ( + iam_policy_pb2, # type: ignore + policy_pb2, # type: ignore +) +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson -from google.cloud.eventarc_v1.types import channel -from google.cloud.eventarc_v1.types import channel_connection -from google.cloud.eventarc_v1.types import discovery -from google.cloud.eventarc_v1.types import eventarc -from google.cloud.eventarc_v1.types import google_channel_config -from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config -from google.cloud.eventarc_v1.types import trigger -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .base import EventarcTransport, DEFAULT_CLIENT_INFO +from .base import DEFAULT_CLIENT_INFO, EventarcTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -55,7 +59,9 @@ class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) if logging_enabled: # pragma: NO COVER request_metadata = 
client_call_details.metadata if isinstance(request, proto.Message): @@ -76,7 +82,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -87,7 +93,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) result = response.result() if isinstance(result, proto.Message): response_payload = type(result).to_json(result) @@ -102,7 +112,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Received response for {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": client_call_details.method, "response": grpc_response, @@ -126,23 +136,26 @@ class EventarcGrpcTransport(EventarcTransport): It sends protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. 
""" + _stubs: Dict[str, Callable] - def __init__(self, *, - host: str = 'eventarc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "eventarc.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: @@ -270,19 +283,23 @@ def __init__(self, *, ) self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) # Wrap messages. 
This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod - def create_channel(cls, - host: str = 'eventarc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: + def create_channel( + cls, + host: str = "eventarc.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: """Create and return a gRPC channel object. Args: host (Optional[str]): The host for the channel to use. @@ -318,13 +335,12 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property @@ -344,9 +360,7 @@ def operations_client(self) -> operations_v1.OperationsClient: return self._operations_client @property - def get_trigger(self) -> Callable[ - [eventarc.GetTriggerRequest], - trigger.Trigger]: + def get_trigger(self) -> Callable[[eventarc.GetTriggerRequest], trigger.Trigger]: r"""Return a callable for the get trigger method over gRPC. Get a single trigger. @@ -361,18 +375,18 @@ def get_trigger(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'get_trigger' not in self._stubs: - self._stubs['get_trigger'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/GetTrigger', + if "get_trigger" not in self._stubs: + self._stubs["get_trigger"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/GetTrigger", request_serializer=eventarc.GetTriggerRequest.serialize, response_deserializer=trigger.Trigger.deserialize, ) - return self._stubs['get_trigger'] + return self._stubs["get_trigger"] @property - def list_triggers(self) -> Callable[ - [eventarc.ListTriggersRequest], - eventarc.ListTriggersResponse]: + def list_triggers( + self, + ) -> Callable[[eventarc.ListTriggersRequest], eventarc.ListTriggersResponse]: r"""Return a callable for the list triggers method over gRPC. List triggers. @@ -387,18 +401,18 @@ def list_triggers(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_triggers' not in self._stubs: - self._stubs['list_triggers'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/ListTriggers', + if "list_triggers" not in self._stubs: + self._stubs["list_triggers"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/ListTriggers", request_serializer=eventarc.ListTriggersRequest.serialize, response_deserializer=eventarc.ListTriggersResponse.deserialize, ) - return self._stubs['list_triggers'] + return self._stubs["list_triggers"] @property - def create_trigger(self) -> Callable[ - [eventarc.CreateTriggerRequest], - operations_pb2.Operation]: + def create_trigger( + self, + ) -> Callable[[eventarc.CreateTriggerRequest], operations_pb2.Operation]: r"""Return a callable for the create trigger method over gRPC. Create a new trigger in a particular project and @@ -414,18 +428,18 @@ def create_trigger(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_trigger' not in self._stubs: - self._stubs['create_trigger'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/CreateTrigger', + if "create_trigger" not in self._stubs: + self._stubs["create_trigger"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/CreateTrigger", request_serializer=eventarc.CreateTriggerRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_trigger'] + return self._stubs["create_trigger"] @property - def update_trigger(self) -> Callable[ - [eventarc.UpdateTriggerRequest], - operations_pb2.Operation]: + def update_trigger( + self, + ) -> Callable[[eventarc.UpdateTriggerRequest], operations_pb2.Operation]: r"""Return a callable for the update trigger method over gRPC. Update a single trigger. @@ -440,18 +454,18 @@ def update_trigger(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_trigger' not in self._stubs: - self._stubs['update_trigger'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/UpdateTrigger', + if "update_trigger" not in self._stubs: + self._stubs["update_trigger"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/UpdateTrigger", request_serializer=eventarc.UpdateTriggerRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['update_trigger'] + return self._stubs["update_trigger"] @property - def delete_trigger(self) -> Callable[ - [eventarc.DeleteTriggerRequest], - operations_pb2.Operation]: + def delete_trigger( + self, + ) -> Callable[[eventarc.DeleteTriggerRequest], operations_pb2.Operation]: r"""Return a callable for the delete trigger method over gRPC. Delete a single trigger. 
@@ -466,18 +480,16 @@ def delete_trigger(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_trigger' not in self._stubs: - self._stubs['delete_trigger'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/DeleteTrigger', + if "delete_trigger" not in self._stubs: + self._stubs["delete_trigger"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/DeleteTrigger", request_serializer=eventarc.DeleteTriggerRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['delete_trigger'] + return self._stubs["delete_trigger"] @property - def get_channel(self) -> Callable[ - [eventarc.GetChannelRequest], - channel.Channel]: + def get_channel(self) -> Callable[[eventarc.GetChannelRequest], channel.Channel]: r"""Return a callable for the get channel method over gRPC. Get a single Channel. @@ -492,18 +504,18 @@ def get_channel(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_channel' not in self._stubs: - self._stubs['get_channel'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/GetChannel', + if "get_channel" not in self._stubs: + self._stubs["get_channel"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/GetChannel", request_serializer=eventarc.GetChannelRequest.serialize, response_deserializer=channel.Channel.deserialize, ) - return self._stubs['get_channel'] + return self._stubs["get_channel"] @property - def list_channels(self) -> Callable[ - [eventarc.ListChannelsRequest], - eventarc.ListChannelsResponse]: + def list_channels( + self, + ) -> Callable[[eventarc.ListChannelsRequest], eventarc.ListChannelsResponse]: r"""Return a callable for the list channels method over gRPC. List channels. 
@@ -518,18 +530,18 @@ def list_channels(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_channels' not in self._stubs: - self._stubs['list_channels'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/ListChannels', + if "list_channels" not in self._stubs: + self._stubs["list_channels"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/ListChannels", request_serializer=eventarc.ListChannelsRequest.serialize, response_deserializer=eventarc.ListChannelsResponse.deserialize, ) - return self._stubs['list_channels'] + return self._stubs["list_channels"] @property - def create_channel_(self) -> Callable[ - [eventarc.CreateChannelRequest], - operations_pb2.Operation]: + def create_channel_( + self, + ) -> Callable[[eventarc.CreateChannelRequest], operations_pb2.Operation]: r"""Return a callable for the create channel method over gRPC. Create a new channel in a particular project and @@ -545,18 +557,18 @@ def create_channel_(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'create_channel_' not in self._stubs: - self._stubs['create_channel_'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/CreateChannel', + if "create_channel_" not in self._stubs: + self._stubs["create_channel_"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/CreateChannel", request_serializer=eventarc.CreateChannelRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_channel_'] + return self._stubs["create_channel_"] @property - def update_channel(self) -> Callable[ - [eventarc.UpdateChannelRequest], - operations_pb2.Operation]: + def update_channel( + self, + ) -> Callable[[eventarc.UpdateChannelRequest], operations_pb2.Operation]: r"""Return a callable for the update channel method over gRPC. Update a single channel. @@ -571,18 +583,18 @@ def update_channel(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_channel' not in self._stubs: - self._stubs['update_channel'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/UpdateChannel', + if "update_channel" not in self._stubs: + self._stubs["update_channel"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/UpdateChannel", request_serializer=eventarc.UpdateChannelRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['update_channel'] + return self._stubs["update_channel"] @property - def delete_channel(self) -> Callable[ - [eventarc.DeleteChannelRequest], - operations_pb2.Operation]: + def delete_channel( + self, + ) -> Callable[[eventarc.DeleteChannelRequest], operations_pb2.Operation]: r"""Return a callable for the delete channel method over gRPC. Delete a single channel. @@ -597,18 +609,18 @@ def delete_channel(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_channel' not in self._stubs: - self._stubs['delete_channel'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/DeleteChannel', + if "delete_channel" not in self._stubs: + self._stubs["delete_channel"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/DeleteChannel", request_serializer=eventarc.DeleteChannelRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['delete_channel'] + return self._stubs["delete_channel"] @property - def get_provider(self) -> Callable[ - [eventarc.GetProviderRequest], - discovery.Provider]: + def get_provider( + self, + ) -> Callable[[eventarc.GetProviderRequest], discovery.Provider]: r"""Return a callable for the get provider method over gRPC. Get a single Provider. @@ -623,18 +635,18 @@ def get_provider(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_provider' not in self._stubs: - self._stubs['get_provider'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/GetProvider', + if "get_provider" not in self._stubs: + self._stubs["get_provider"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/GetProvider", request_serializer=eventarc.GetProviderRequest.serialize, response_deserializer=discovery.Provider.deserialize, ) - return self._stubs['get_provider'] + return self._stubs["get_provider"] @property - def list_providers(self) -> Callable[ - [eventarc.ListProvidersRequest], - eventarc.ListProvidersResponse]: + def list_providers( + self, + ) -> Callable[[eventarc.ListProvidersRequest], eventarc.ListProvidersResponse]: r"""Return a callable for the list providers method over gRPC. List providers. @@ -649,18 +661,20 @@ def list_providers(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_providers' not in self._stubs: - self._stubs['list_providers'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/ListProviders', + if "list_providers" not in self._stubs: + self._stubs["list_providers"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/ListProviders", request_serializer=eventarc.ListProvidersRequest.serialize, response_deserializer=eventarc.ListProvidersResponse.deserialize, ) - return self._stubs['list_providers'] + return self._stubs["list_providers"] @property - def get_channel_connection(self) -> Callable[ - [eventarc.GetChannelConnectionRequest], - channel_connection.ChannelConnection]: + def get_channel_connection( + self, + ) -> Callable[ + [eventarc.GetChannelConnectionRequest], channel_connection.ChannelConnection + ]: r"""Return a callable for the get channel connection method over gRPC. Get a single ChannelConnection. @@ -675,18 +689,21 @@ def get_channel_connection(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'get_channel_connection' not in self._stubs: - self._stubs['get_channel_connection'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/GetChannelConnection', + if "get_channel_connection" not in self._stubs: + self._stubs["get_channel_connection"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/GetChannelConnection", request_serializer=eventarc.GetChannelConnectionRequest.serialize, response_deserializer=channel_connection.ChannelConnection.deserialize, ) - return self._stubs['get_channel_connection'] + return self._stubs["get_channel_connection"] @property - def list_channel_connections(self) -> Callable[ - [eventarc.ListChannelConnectionsRequest], - eventarc.ListChannelConnectionsResponse]: + def list_channel_connections( + self, + ) -> Callable[ + [eventarc.ListChannelConnectionsRequest], + eventarc.ListChannelConnectionsResponse, + ]: r"""Return a callable for the list channel connections method over gRPC. List channel connections. @@ -701,18 +718,18 @@ def list_channel_connections(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_channel_connections' not in self._stubs: - self._stubs['list_channel_connections'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/ListChannelConnections', + if "list_channel_connections" not in self._stubs: + self._stubs["list_channel_connections"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/ListChannelConnections", request_serializer=eventarc.ListChannelConnectionsRequest.serialize, response_deserializer=eventarc.ListChannelConnectionsResponse.deserialize, ) - return self._stubs['list_channel_connections'] + return self._stubs["list_channel_connections"] @property - def create_channel_connection(self) -> Callable[ - [eventarc.CreateChannelConnectionRequest], - operations_pb2.Operation]: + def create_channel_connection( + self, + ) -> Callable[[eventarc.CreateChannelConnectionRequest], operations_pb2.Operation]: r"""Return a callable for the create channel connection method over gRPC. Create a new ChannelConnection in a particular @@ -728,18 +745,18 @@ def create_channel_connection(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'create_channel_connection' not in self._stubs: - self._stubs['create_channel_connection'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/CreateChannelConnection', + if "create_channel_connection" not in self._stubs: + self._stubs["create_channel_connection"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/CreateChannelConnection", request_serializer=eventarc.CreateChannelConnectionRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_channel_connection'] + return self._stubs["create_channel_connection"] @property - def delete_channel_connection(self) -> Callable[ - [eventarc.DeleteChannelConnectionRequest], - operations_pb2.Operation]: + def delete_channel_connection( + self, + ) -> Callable[[eventarc.DeleteChannelConnectionRequest], operations_pb2.Operation]: r"""Return a callable for the delete channel connection method over gRPC. Delete a single ChannelConnection. @@ -754,18 +771,21 @@ def delete_channel_connection(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'delete_channel_connection' not in self._stubs: - self._stubs['delete_channel_connection'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/DeleteChannelConnection', + if "delete_channel_connection" not in self._stubs: + self._stubs["delete_channel_connection"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/DeleteChannelConnection", request_serializer=eventarc.DeleteChannelConnectionRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['delete_channel_connection'] + return self._stubs["delete_channel_connection"] @property - def get_google_channel_config(self) -> Callable[ - [eventarc.GetGoogleChannelConfigRequest], - google_channel_config.GoogleChannelConfig]: + def get_google_channel_config( + self, + ) -> Callable[ + [eventarc.GetGoogleChannelConfigRequest], + google_channel_config.GoogleChannelConfig, + ]: r"""Return a callable for the get google channel config method over gRPC. Get a GoogleChannelConfig @@ -780,18 +800,21 @@ def get_google_channel_config(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'get_google_channel_config' not in self._stubs: - self._stubs['get_google_channel_config'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/GetGoogleChannelConfig', + if "get_google_channel_config" not in self._stubs: + self._stubs["get_google_channel_config"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/GetGoogleChannelConfig", request_serializer=eventarc.GetGoogleChannelConfigRequest.serialize, response_deserializer=google_channel_config.GoogleChannelConfig.deserialize, ) - return self._stubs['get_google_channel_config'] + return self._stubs["get_google_channel_config"] @property - def update_google_channel_config(self) -> Callable[ - [eventarc.UpdateGoogleChannelConfigRequest], - gce_google_channel_config.GoogleChannelConfig]: + def update_google_channel_config( + self, + ) -> Callable[ + [eventarc.UpdateGoogleChannelConfigRequest], + gce_google_channel_config.GoogleChannelConfig, + ]: r"""Return a callable for the update google channel config method over gRPC. Update a single GoogleChannelConfig @@ -806,13 +829,15 @@ def update_google_channel_config(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'update_google_channel_config' not in self._stubs: - self._stubs['update_google_channel_config'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/UpdateGoogleChannelConfig', - request_serializer=eventarc.UpdateGoogleChannelConfigRequest.serialize, - response_deserializer=gce_google_channel_config.GoogleChannelConfig.deserialize, + if "update_google_channel_config" not in self._stubs: + self._stubs["update_google_channel_config"] = ( + self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/UpdateGoogleChannelConfig", + request_serializer=eventarc.UpdateGoogleChannelConfigRequest.serialize, + response_deserializer=gce_google_channel_config.GoogleChannelConfig.deserialize, + ) ) - return self._stubs['update_google_channel_config'] + return self._stubs["update_google_channel_config"] def close(self): self._logged_channel.close() @@ -821,8 +846,7 @@ def close(self): def delete_operation( self, ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ + r"""Return a callable for the delete_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -839,8 +863,7 @@ def delete_operation( def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -857,8 +880,7 @@ def cancel_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. 
- """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -874,9 +896,10 @@ def get_operation( @property def list_operations( self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -892,9 +915,10 @@ def list_operations( @property def list_locations( self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -911,8 +935,7 @@ def list_locations( def get_location( self, ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ + r"""Return a callable for the list locations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
# gRPC handles serialization and deserialization, so we just need @@ -980,7 +1003,8 @@ def get_iam_policy( def test_iam_permissions( self, ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, ]: r"""Return a callable for the test iam permissions method over gRPC. Tests the specified permissions against the IAM access control @@ -1009,6 +1033,4 @@ def kind(self) -> str: return "grpc" -__all__ = ( - 'EventarcGrpcTransport', -) +__all__ = ("EventarcGrpcTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py index 891535c53074..d25005ad5c73 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py @@ -15,41 +15,45 @@ # import inspect import json -import pickle import logging as std_logging +import pickle import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.api_core import retry_async as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # 
type: ignore +from google.cloud.eventarc_v1.types import ( + channel, + channel_connection, + discovery, + eventarc, + google_channel_config, + trigger, +) +from google.cloud.eventarc_v1.types import ( + google_channel_config as gce_google_channel_config, +) +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import ( + iam_policy_pb2, # type: ignore + policy_pb2, # type: ignore +) +from google.longrunning import operations_pb2 # type: ignore from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore from grpc.experimental import aio # type: ignore -from google.cloud.eventarc_v1.types import channel -from google.cloud.eventarc_v1.types import channel_connection -from google.cloud.eventarc_v1.types import discovery -from google.cloud.eventarc_v1.types import eventarc -from google.cloud.eventarc_v1.types import google_channel_config -from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config -from google.cloud.eventarc_v1.types import trigger -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .base import EventarcTransport, DEFAULT_CLIENT_INFO +from .base import DEFAULT_CLIENT_INFO, EventarcTransport from .grpc import EventarcGrpcTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -57,9 +61,13 @@ _LOGGER = std_logging.getLogger(__name__) -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER async def intercept_unary_unary(self, continuation, 
client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) if logging_enabled: # pragma: NO COVER request_metadata = client_call_details.metadata if isinstance(request, proto.Message): @@ -80,7 +88,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -91,7 +99,11 @@ async def intercept_unary_unary(self, continuation, client_call_details, request if logging_enabled: # pragma: NO COVER response_metadata = await response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) result = await response if isinstance(result, proto.Message): response_payload = type(result).to_json(result) @@ -106,7 +118,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Received response to rpc {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": str(client_call_details.method), "response": grpc_response, @@ -135,13 +147,15 @@ class EventarcGrpcAsyncIOTransport(EventarcTransport): _stubs: Dict[str, Callable] = {} @classmethod - def create_channel(cls, - host: str = 'eventarc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: + def create_channel( + cls, + host: str = "eventarc.googleapis.com", + 
credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: host (Optional[str]): The host for the channel to use. @@ -172,24 +186,26 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) - def __init__(self, *, - host: str = 'eventarc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "eventarc.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = 
None, + ) -> None: """Instantiate the transport. Args: @@ -319,7 +335,9 @@ def __init__(self, *, self._interceptor = _LoggingClientAIOInterceptor() self._grpc_channel._unary_unary_interceptors.append(self._interceptor) self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @@ -350,9 +368,9 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: return self._operations_client @property - def get_trigger(self) -> Callable[ - [eventarc.GetTriggerRequest], - Awaitable[trigger.Trigger]]: + def get_trigger( + self, + ) -> Callable[[eventarc.GetTriggerRequest], Awaitable[trigger.Trigger]]: r"""Return a callable for the get trigger method over gRPC. Get a single trigger. @@ -367,18 +385,20 @@ def get_trigger(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_trigger' not in self._stubs: - self._stubs['get_trigger'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/GetTrigger', + if "get_trigger" not in self._stubs: + self._stubs["get_trigger"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/GetTrigger", request_serializer=eventarc.GetTriggerRequest.serialize, response_deserializer=trigger.Trigger.deserialize, ) - return self._stubs['get_trigger'] + return self._stubs["get_trigger"] @property - def list_triggers(self) -> Callable[ - [eventarc.ListTriggersRequest], - Awaitable[eventarc.ListTriggersResponse]]: + def list_triggers( + self, + ) -> Callable[ + [eventarc.ListTriggersRequest], Awaitable[eventarc.ListTriggersResponse] + ]: r"""Return a callable for the list triggers method over gRPC. List triggers. 
@@ -393,18 +413,18 @@ def list_triggers(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_triggers' not in self._stubs: - self._stubs['list_triggers'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/ListTriggers', + if "list_triggers" not in self._stubs: + self._stubs["list_triggers"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/ListTriggers", request_serializer=eventarc.ListTriggersRequest.serialize, response_deserializer=eventarc.ListTriggersResponse.deserialize, ) - return self._stubs['list_triggers'] + return self._stubs["list_triggers"] @property - def create_trigger(self) -> Callable[ - [eventarc.CreateTriggerRequest], - Awaitable[operations_pb2.Operation]]: + def create_trigger( + self, + ) -> Callable[[eventarc.CreateTriggerRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the create trigger method over gRPC. Create a new trigger in a particular project and @@ -420,18 +440,18 @@ def create_trigger(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'create_trigger' not in self._stubs: - self._stubs['create_trigger'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/CreateTrigger', + if "create_trigger" not in self._stubs: + self._stubs["create_trigger"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/CreateTrigger", request_serializer=eventarc.CreateTriggerRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_trigger'] + return self._stubs["create_trigger"] @property - def update_trigger(self) -> Callable[ - [eventarc.UpdateTriggerRequest], - Awaitable[operations_pb2.Operation]]: + def update_trigger( + self, + ) -> Callable[[eventarc.UpdateTriggerRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the update trigger method over gRPC. Update a single trigger. @@ -446,18 +466,18 @@ def update_trigger(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_trigger' not in self._stubs: - self._stubs['update_trigger'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/UpdateTrigger', + if "update_trigger" not in self._stubs: + self._stubs["update_trigger"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/UpdateTrigger", request_serializer=eventarc.UpdateTriggerRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['update_trigger'] + return self._stubs["update_trigger"] @property - def delete_trigger(self) -> Callable[ - [eventarc.DeleteTriggerRequest], - Awaitable[operations_pb2.Operation]]: + def delete_trigger( + self, + ) -> Callable[[eventarc.DeleteTriggerRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the delete trigger method over gRPC. Delete a single trigger. @@ -472,18 +492,18 @@ def delete_trigger(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_trigger' not in self._stubs: - self._stubs['delete_trigger'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/DeleteTrigger', + if "delete_trigger" not in self._stubs: + self._stubs["delete_trigger"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/DeleteTrigger", request_serializer=eventarc.DeleteTriggerRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['delete_trigger'] + return self._stubs["delete_trigger"] @property - def get_channel(self) -> Callable[ - [eventarc.GetChannelRequest], - Awaitable[channel.Channel]]: + def get_channel( + self, + ) -> Callable[[eventarc.GetChannelRequest], Awaitable[channel.Channel]]: r"""Return a callable for the get channel method over gRPC. Get a single Channel. @@ -498,18 +518,20 @@ def get_channel(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_channel' not in self._stubs: - self._stubs['get_channel'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/GetChannel', + if "get_channel" not in self._stubs: + self._stubs["get_channel"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/GetChannel", request_serializer=eventarc.GetChannelRequest.serialize, response_deserializer=channel.Channel.deserialize, ) - return self._stubs['get_channel'] + return self._stubs["get_channel"] @property - def list_channels(self) -> Callable[ - [eventarc.ListChannelsRequest], - Awaitable[eventarc.ListChannelsResponse]]: + def list_channels( + self, + ) -> Callable[ + [eventarc.ListChannelsRequest], Awaitable[eventarc.ListChannelsResponse] + ]: r"""Return a callable for the list channels method over gRPC. List channels. @@ -524,18 +546,18 @@ def list_channels(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_channels' not in self._stubs: - self._stubs['list_channels'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/ListChannels', + if "list_channels" not in self._stubs: + self._stubs["list_channels"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/ListChannels", request_serializer=eventarc.ListChannelsRequest.serialize, response_deserializer=eventarc.ListChannelsResponse.deserialize, ) - return self._stubs['list_channels'] + return self._stubs["list_channels"] @property - def create_channel_(self) -> Callable[ - [eventarc.CreateChannelRequest], - Awaitable[operations_pb2.Operation]]: + def create_channel_( + self, + ) -> Callable[[eventarc.CreateChannelRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the create channel method over gRPC. Create a new channel in a particular project and @@ -551,18 +573,18 @@ def create_channel_(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_channel_' not in self._stubs: - self._stubs['create_channel_'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/CreateChannel', + if "create_channel_" not in self._stubs: + self._stubs["create_channel_"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/CreateChannel", request_serializer=eventarc.CreateChannelRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_channel_'] + return self._stubs["create_channel_"] @property - def update_channel(self) -> Callable[ - [eventarc.UpdateChannelRequest], - Awaitable[operations_pb2.Operation]]: + def update_channel( + self, + ) -> Callable[[eventarc.UpdateChannelRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the update channel method over gRPC. 
Update a single channel. @@ -577,18 +599,18 @@ def update_channel(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_channel' not in self._stubs: - self._stubs['update_channel'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/UpdateChannel', + if "update_channel" not in self._stubs: + self._stubs["update_channel"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/UpdateChannel", request_serializer=eventarc.UpdateChannelRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['update_channel'] + return self._stubs["update_channel"] @property - def delete_channel(self) -> Callable[ - [eventarc.DeleteChannelRequest], - Awaitable[operations_pb2.Operation]]: + def delete_channel( + self, + ) -> Callable[[eventarc.DeleteChannelRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the delete channel method over gRPC. Delete a single channel. @@ -603,18 +625,18 @@ def delete_channel(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'delete_channel' not in self._stubs: - self._stubs['delete_channel'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/DeleteChannel', + if "delete_channel" not in self._stubs: + self._stubs["delete_channel"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/DeleteChannel", request_serializer=eventarc.DeleteChannelRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['delete_channel'] + return self._stubs["delete_channel"] @property - def get_provider(self) -> Callable[ - [eventarc.GetProviderRequest], - Awaitable[discovery.Provider]]: + def get_provider( + self, + ) -> Callable[[eventarc.GetProviderRequest], Awaitable[discovery.Provider]]: r"""Return a callable for the get provider method over gRPC. Get a single Provider. @@ -629,18 +651,20 @@ def get_provider(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_provider' not in self._stubs: - self._stubs['get_provider'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/GetProvider', + if "get_provider" not in self._stubs: + self._stubs["get_provider"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/GetProvider", request_serializer=eventarc.GetProviderRequest.serialize, response_deserializer=discovery.Provider.deserialize, ) - return self._stubs['get_provider'] + return self._stubs["get_provider"] @property - def list_providers(self) -> Callable[ - [eventarc.ListProvidersRequest], - Awaitable[eventarc.ListProvidersResponse]]: + def list_providers( + self, + ) -> Callable[ + [eventarc.ListProvidersRequest], Awaitable[eventarc.ListProvidersResponse] + ]: r"""Return a callable for the list providers method over gRPC. List providers. @@ -655,18 +679,21 @@ def list_providers(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_providers' not in self._stubs: - self._stubs['list_providers'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/ListProviders', + if "list_providers" not in self._stubs: + self._stubs["list_providers"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/ListProviders", request_serializer=eventarc.ListProvidersRequest.serialize, response_deserializer=eventarc.ListProvidersResponse.deserialize, ) - return self._stubs['list_providers'] + return self._stubs["list_providers"] @property - def get_channel_connection(self) -> Callable[ - [eventarc.GetChannelConnectionRequest], - Awaitable[channel_connection.ChannelConnection]]: + def get_channel_connection( + self, + ) -> Callable[ + [eventarc.GetChannelConnectionRequest], + Awaitable[channel_connection.ChannelConnection], + ]: r"""Return a callable for the get channel connection method over gRPC. Get a single ChannelConnection. @@ -681,18 +708,21 @@ def get_channel_connection(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'get_channel_connection' not in self._stubs: - self._stubs['get_channel_connection'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/GetChannelConnection', + if "get_channel_connection" not in self._stubs: + self._stubs["get_channel_connection"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/GetChannelConnection", request_serializer=eventarc.GetChannelConnectionRequest.serialize, response_deserializer=channel_connection.ChannelConnection.deserialize, ) - return self._stubs['get_channel_connection'] + return self._stubs["get_channel_connection"] @property - def list_channel_connections(self) -> Callable[ - [eventarc.ListChannelConnectionsRequest], - Awaitable[eventarc.ListChannelConnectionsResponse]]: + def list_channel_connections( + self, + ) -> Callable[ + [eventarc.ListChannelConnectionsRequest], + Awaitable[eventarc.ListChannelConnectionsResponse], + ]: r"""Return a callable for the list channel connections method over gRPC. List channel connections. @@ -707,18 +737,20 @@ def list_channel_connections(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_channel_connections' not in self._stubs: - self._stubs['list_channel_connections'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/ListChannelConnections', + if "list_channel_connections" not in self._stubs: + self._stubs["list_channel_connections"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/ListChannelConnections", request_serializer=eventarc.ListChannelConnectionsRequest.serialize, response_deserializer=eventarc.ListChannelConnectionsResponse.deserialize, ) - return self._stubs['list_channel_connections'] + return self._stubs["list_channel_connections"] @property - def create_channel_connection(self) -> Callable[ - [eventarc.CreateChannelConnectionRequest], - Awaitable[operations_pb2.Operation]]: + def create_channel_connection( + self, + ) -> Callable[ + [eventarc.CreateChannelConnectionRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the create channel connection method over gRPC. Create a new ChannelConnection in a particular @@ -734,18 +766,20 @@ def create_channel_connection(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'create_channel_connection' not in self._stubs: - self._stubs['create_channel_connection'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/CreateChannelConnection', + if "create_channel_connection" not in self._stubs: + self._stubs["create_channel_connection"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/CreateChannelConnection", request_serializer=eventarc.CreateChannelConnectionRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_channel_connection'] + return self._stubs["create_channel_connection"] @property - def delete_channel_connection(self) -> Callable[ - [eventarc.DeleteChannelConnectionRequest], - Awaitable[operations_pb2.Operation]]: + def delete_channel_connection( + self, + ) -> Callable[ + [eventarc.DeleteChannelConnectionRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the delete channel connection method over gRPC. Delete a single ChannelConnection. @@ -760,18 +794,21 @@ def delete_channel_connection(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'delete_channel_connection' not in self._stubs: - self._stubs['delete_channel_connection'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/DeleteChannelConnection', + if "delete_channel_connection" not in self._stubs: + self._stubs["delete_channel_connection"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/DeleteChannelConnection", request_serializer=eventarc.DeleteChannelConnectionRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['delete_channel_connection'] + return self._stubs["delete_channel_connection"] @property - def get_google_channel_config(self) -> Callable[ - [eventarc.GetGoogleChannelConfigRequest], - Awaitable[google_channel_config.GoogleChannelConfig]]: + def get_google_channel_config( + self, + ) -> Callable[ + [eventarc.GetGoogleChannelConfigRequest], + Awaitable[google_channel_config.GoogleChannelConfig], + ]: r"""Return a callable for the get google channel config method over gRPC. Get a GoogleChannelConfig @@ -786,18 +823,21 @@ def get_google_channel_config(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'get_google_channel_config' not in self._stubs: - self._stubs['get_google_channel_config'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/GetGoogleChannelConfig', + if "get_google_channel_config" not in self._stubs: + self._stubs["get_google_channel_config"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/GetGoogleChannelConfig", request_serializer=eventarc.GetGoogleChannelConfigRequest.serialize, response_deserializer=google_channel_config.GoogleChannelConfig.deserialize, ) - return self._stubs['get_google_channel_config'] + return self._stubs["get_google_channel_config"] @property - def update_google_channel_config(self) -> Callable[ - [eventarc.UpdateGoogleChannelConfigRequest], - Awaitable[gce_google_channel_config.GoogleChannelConfig]]: + def update_google_channel_config( + self, + ) -> Callable[ + [eventarc.UpdateGoogleChannelConfigRequest], + Awaitable[gce_google_channel_config.GoogleChannelConfig], + ]: r"""Return a callable for the update google channel config method over gRPC. Update a single GoogleChannelConfig @@ -812,16 +852,18 @@ def update_google_channel_config(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'update_google_channel_config' not in self._stubs: - self._stubs['update_google_channel_config'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/UpdateGoogleChannelConfig', - request_serializer=eventarc.UpdateGoogleChannelConfigRequest.serialize, - response_deserializer=gce_google_channel_config.GoogleChannelConfig.deserialize, + if "update_google_channel_config" not in self._stubs: + self._stubs["update_google_channel_config"] = ( + self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/UpdateGoogleChannelConfig", + request_serializer=eventarc.UpdateGoogleChannelConfigRequest.serialize, + response_deserializer=gce_google_channel_config.GoogleChannelConfig.deserialize, + ) ) - return self._stubs['update_google_channel_config'] + return self._stubs["update_google_channel_config"] def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { self.get_trigger: self._wrap_method( self.get_trigger, @@ -976,8 +1018,7 @@ def kind(self) -> str: def delete_operation( self, ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ + r"""Return a callable for the delete_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -994,8 +1035,7 @@ def delete_operation( def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
# gRPC handles serialization and deserialization, so we just need @@ -1012,8 +1052,7 @@ def cancel_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -1029,9 +1068,10 @@ def get_operation( @property def list_operations( self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -1047,9 +1087,10 @@ def list_operations( @property def list_locations( self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -1066,8 +1107,7 @@ def list_locations( def get_location( self, ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ + r"""Return a callable for the list locations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
# gRPC handles serialization and deserialization, so we just need @@ -1135,7 +1175,8 @@ def get_iam_policy( def test_iam_permissions( self, ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, ]: r"""Return a callable for the test iam permissions method over gRPC. Tests the specified permissions against the IAM access control @@ -1160,6 +1201,4 @@ def test_iam_permissions( return self._stubs["test_iam_permissions"] -__all__ = ( - 'EventarcGrpcAsyncIOTransport', -) +__all__ = ("EventarcGrpcAsyncIOTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index 935e01dfcc63..873de336658e 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -13,42 +13,40 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import logging +import dataclasses import json # type: ignore +import logging +import warnings +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore +import google.protobuf from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1, rest_helpers, rest_streaming from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 -import google.protobuf - +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.eventarc_v1.types import ( + channel, + channel_connection, + discovery, + eventarc, + google_channel_config, + trigger, +) +from google.cloud.eventarc_v1.types import ( + google_channel_config as gce_google_channel_config, +) +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import ( + iam_policy_pb2, # type: ignore + policy_pb2, # type: ignore +) +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import json_format -from google.api_core import operations_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore - from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.eventarc_v1.types import channel -from google.cloud.eventarc_v1.types import channel_connection -from google.cloud.eventarc_v1.types import discovery -from google.cloud.eventarc_v1.types import eventarc -from google.cloud.eventarc_v1.types import 
google_channel_config -from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config -from google.cloud.eventarc_v1.types import trigger -from google.longrunning import operations_pb2 # type: ignore - -from .rest_base import _BaseEventarcRestTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseEventarcRestTransport try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -57,6 +55,7 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -237,7 +236,12 @@ def post_update_trigger(self, response): """ - def pre_create_channel(self, request: eventarc.CreateChannelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.CreateChannelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + + def pre_create_channel( + self, + request: eventarc.CreateChannelRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.CreateChannelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_channel Override in a subclass to manipulate the request or metadata @@ -245,7 +249,9 @@ def pre_create_channel(self, request: eventarc.CreateChannelRequest, metadata: S """ return request, metadata - def post_create_channel(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_create_channel( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for create_channel DEPRECATED. 
Please use the `post_create_channel_with_metadata` @@ -258,7 +264,11 @@ def post_create_channel(self, response: operations_pb2.Operation) -> operations_ """ return response - def post_create_channel_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_create_channel_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for create_channel Override in a subclass to read or manipulate the response or metadata after it @@ -273,7 +283,13 @@ def post_create_channel_with_metadata(self, response: operations_pb2.Operation, """ return response, metadata - def pre_create_channel_connection(self, request: eventarc.CreateChannelConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.CreateChannelConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_create_channel_connection( + self, + request: eventarc.CreateChannelConnectionRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + eventarc.CreateChannelConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for create_channel_connection Override in a subclass to manipulate the request or metadata @@ -281,7 +297,9 @@ def pre_create_channel_connection(self, request: eventarc.CreateChannelConnectio """ return request, metadata - def post_create_channel_connection(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_create_channel_connection( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for create_channel_connection DEPRECATED. 
Please use the `post_create_channel_connection_with_metadata` @@ -294,7 +312,11 @@ def post_create_channel_connection(self, response: operations_pb2.Operation) -> """ return response - def post_create_channel_connection_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_create_channel_connection_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for create_channel_connection Override in a subclass to read or manipulate the response or metadata after it @@ -309,7 +331,11 @@ def post_create_channel_connection_with_metadata(self, response: operations_pb2. """ return response, metadata - def pre_create_trigger(self, request: eventarc.CreateTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.CreateTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_create_trigger( + self, + request: eventarc.CreateTriggerRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.CreateTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_trigger Override in a subclass to manipulate the request or metadata @@ -317,7 +343,9 @@ def pre_create_trigger(self, request: eventarc.CreateTriggerRequest, metadata: S """ return request, metadata - def post_create_trigger(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_create_trigger( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for create_trigger DEPRECATED. 
Please use the `post_create_trigger_with_metadata` @@ -330,7 +358,11 @@ def post_create_trigger(self, response: operations_pb2.Operation) -> operations_ """ return response - def post_create_trigger_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_create_trigger_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for create_trigger Override in a subclass to read or manipulate the response or metadata after it @@ -345,7 +377,11 @@ def post_create_trigger_with_metadata(self, response: operations_pb2.Operation, """ return response, metadata - def pre_delete_channel(self, request: eventarc.DeleteChannelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.DeleteChannelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_delete_channel( + self, + request: eventarc.DeleteChannelRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.DeleteChannelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_channel Override in a subclass to manipulate the request or metadata @@ -353,7 +389,9 @@ def pre_delete_channel(self, request: eventarc.DeleteChannelRequest, metadata: S """ return request, metadata - def post_delete_channel(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_delete_channel( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_channel DEPRECATED. 
Please use the `post_delete_channel_with_metadata` @@ -366,7 +404,11 @@ def post_delete_channel(self, response: operations_pb2.Operation) -> operations_ """ return response - def post_delete_channel_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_delete_channel_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for delete_channel Override in a subclass to read or manipulate the response or metadata after it @@ -381,7 +423,13 @@ def post_delete_channel_with_metadata(self, response: operations_pb2.Operation, """ return response, metadata - def pre_delete_channel_connection(self, request: eventarc.DeleteChannelConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.DeleteChannelConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_delete_channel_connection( + self, + request: eventarc.DeleteChannelConnectionRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + eventarc.DeleteChannelConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_channel_connection Override in a subclass to manipulate the request or metadata @@ -389,7 +437,9 @@ def pre_delete_channel_connection(self, request: eventarc.DeleteChannelConnectio """ return request, metadata - def post_delete_channel_connection(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_delete_channel_connection( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_channel_connection DEPRECATED. 
Please use the `post_delete_channel_connection_with_metadata` @@ -402,7 +452,11 @@ def post_delete_channel_connection(self, response: operations_pb2.Operation) -> """ return response - def post_delete_channel_connection_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_delete_channel_connection_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for delete_channel_connection Override in a subclass to read or manipulate the response or metadata after it @@ -417,7 +471,11 @@ def post_delete_channel_connection_with_metadata(self, response: operations_pb2. """ return response, metadata - def pre_delete_trigger(self, request: eventarc.DeleteTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.DeleteTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_delete_trigger( + self, + request: eventarc.DeleteTriggerRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.DeleteTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_trigger Override in a subclass to manipulate the request or metadata @@ -425,7 +483,9 @@ def pre_delete_trigger(self, request: eventarc.DeleteTriggerRequest, metadata: S """ return request, metadata - def post_delete_trigger(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_delete_trigger( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_trigger DEPRECATED. 
Please use the `post_delete_trigger_with_metadata` @@ -438,7 +498,11 @@ def post_delete_trigger(self, response: operations_pb2.Operation) -> operations_ """ return response - def post_delete_trigger_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_delete_trigger_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for delete_trigger Override in a subclass to read or manipulate the response or metadata after it @@ -453,7 +517,11 @@ def post_delete_trigger_with_metadata(self, response: operations_pb2.Operation, """ return response, metadata - def pre_get_channel(self, request: eventarc.GetChannelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetChannelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_get_channel( + self, + request: eventarc.GetChannelRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.GetChannelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_channel Override in a subclass to manipulate the request or metadata @@ -474,7 +542,11 @@ def post_get_channel(self, response: channel.Channel) -> channel.Channel: """ return response - def post_get_channel_with_metadata(self, response: channel.Channel, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[channel.Channel, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_get_channel_with_metadata( + self, + response: channel.Channel, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[channel.Channel, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for get_channel Override in a subclass to read or manipulate the response or metadata after it @@ -489,7 +561,13 
@@ def post_get_channel_with_metadata(self, response: channel.Channel, metadata: Se """ return response, metadata - def pre_get_channel_connection(self, request: eventarc.GetChannelConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetChannelConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_get_channel_connection( + self, + request: eventarc.GetChannelConnectionRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + eventarc.GetChannelConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_channel_connection Override in a subclass to manipulate the request or metadata @@ -497,7 +575,9 @@ def pre_get_channel_connection(self, request: eventarc.GetChannelConnectionReque """ return request, metadata - def post_get_channel_connection(self, response: channel_connection.ChannelConnection) -> channel_connection.ChannelConnection: + def post_get_channel_connection( + self, response: channel_connection.ChannelConnection + ) -> channel_connection.ChannelConnection: """Post-rpc interceptor for get_channel_connection DEPRECATED. 
Please use the `post_get_channel_connection_with_metadata` @@ -510,7 +590,13 @@ def post_get_channel_connection(self, response: channel_connection.ChannelConnec """ return response - def post_get_channel_connection_with_metadata(self, response: channel_connection.ChannelConnection, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[channel_connection.ChannelConnection, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_get_channel_connection_with_metadata( + self, + response: channel_connection.ChannelConnection, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + channel_connection.ChannelConnection, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Post-rpc interceptor for get_channel_connection Override in a subclass to read or manipulate the response or metadata after it @@ -525,7 +611,13 @@ def post_get_channel_connection_with_metadata(self, response: channel_connection """ return response, metadata - def pre_get_google_channel_config(self, request: eventarc.GetGoogleChannelConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetGoogleChannelConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_get_google_channel_config( + self, + request: eventarc.GetGoogleChannelConfigRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + eventarc.GetGoogleChannelConfigRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_google_channel_config Override in a subclass to manipulate the request or metadata @@ -533,7 +625,9 @@ def pre_get_google_channel_config(self, request: eventarc.GetGoogleChannelConfig """ return request, metadata - def post_get_google_channel_config(self, response: google_channel_config.GoogleChannelConfig) -> google_channel_config.GoogleChannelConfig: + def post_get_google_channel_config( + self, response: google_channel_config.GoogleChannelConfig + ) -> google_channel_config.GoogleChannelConfig: """Post-rpc interceptor for 
get_google_channel_config DEPRECATED. Please use the `post_get_google_channel_config_with_metadata` @@ -546,7 +640,14 @@ def post_get_google_channel_config(self, response: google_channel_config.GoogleC """ return response - def post_get_google_channel_config_with_metadata(self, response: google_channel_config.GoogleChannelConfig, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[google_channel_config.GoogleChannelConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_get_google_channel_config_with_metadata( + self, + response: google_channel_config.GoogleChannelConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + google_channel_config.GoogleChannelConfig, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Post-rpc interceptor for get_google_channel_config Override in a subclass to read or manipulate the response or metadata after it @@ -561,7 +662,11 @@ def post_get_google_channel_config_with_metadata(self, response: google_channel_ """ return response, metadata - def pre_get_provider(self, request: eventarc.GetProviderRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetProviderRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_get_provider( + self, + request: eventarc.GetProviderRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.GetProviderRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_provider Override in a subclass to manipulate the request or metadata @@ -582,7 +687,11 @@ def post_get_provider(self, response: discovery.Provider) -> discovery.Provider: """ return response - def post_get_provider_with_metadata(self, response: discovery.Provider, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[discovery.Provider, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_get_provider_with_metadata( + self, + response: discovery.Provider, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> 
Tuple[discovery.Provider, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for get_provider Override in a subclass to read or manipulate the response or metadata after it @@ -597,7 +706,11 @@ def post_get_provider_with_metadata(self, response: discovery.Provider, metadata """ return response, metadata - def pre_get_trigger(self, request: eventarc.GetTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_get_trigger( + self, + request: eventarc.GetTriggerRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.GetTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_trigger Override in a subclass to manipulate the request or metadata @@ -618,7 +731,11 @@ def post_get_trigger(self, response: trigger.Trigger) -> trigger.Trigger: """ return response - def post_get_trigger_with_metadata(self, response: trigger.Trigger, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[trigger.Trigger, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_get_trigger_with_metadata( + self, + response: trigger.Trigger, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[trigger.Trigger, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for get_trigger Override in a subclass to read or manipulate the response or metadata after it @@ -633,7 +750,13 @@ def post_get_trigger_with_metadata(self, response: trigger.Trigger, metadata: Se """ return response, metadata - def pre_list_channel_connections(self, request: eventarc.ListChannelConnectionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListChannelConnectionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_list_channel_connections( + self, + request: eventarc.ListChannelConnectionsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + 
eventarc.ListChannelConnectionsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_channel_connections Override in a subclass to manipulate the request or metadata @@ -641,7 +764,9 @@ def pre_list_channel_connections(self, request: eventarc.ListChannelConnectionsR """ return request, metadata - def post_list_channel_connections(self, response: eventarc.ListChannelConnectionsResponse) -> eventarc.ListChannelConnectionsResponse: + def post_list_channel_connections( + self, response: eventarc.ListChannelConnectionsResponse + ) -> eventarc.ListChannelConnectionsResponse: """Post-rpc interceptor for list_channel_connections DEPRECATED. Please use the `post_list_channel_connections_with_metadata` @@ -654,7 +779,13 @@ def post_list_channel_connections(self, response: eventarc.ListChannelConnection """ return response - def post_list_channel_connections_with_metadata(self, response: eventarc.ListChannelConnectionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListChannelConnectionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_list_channel_connections_with_metadata( + self, + response: eventarc.ListChannelConnectionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + eventarc.ListChannelConnectionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Post-rpc interceptor for list_channel_connections Override in a subclass to read or manipulate the response or metadata after it @@ -669,7 +800,11 @@ def post_list_channel_connections_with_metadata(self, response: eventarc.ListCha """ return response, metadata - def pre_list_channels(self, request: eventarc.ListChannelsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListChannelsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_list_channels( + self, + request: eventarc.ListChannelsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> 
Tuple[eventarc.ListChannelsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_channels Override in a subclass to manipulate the request or metadata @@ -677,7 +812,9 @@ def pre_list_channels(self, request: eventarc.ListChannelsRequest, metadata: Seq """ return request, metadata - def post_list_channels(self, response: eventarc.ListChannelsResponse) -> eventarc.ListChannelsResponse: + def post_list_channels( + self, response: eventarc.ListChannelsResponse + ) -> eventarc.ListChannelsResponse: """Post-rpc interceptor for list_channels DEPRECATED. Please use the `post_list_channels_with_metadata` @@ -690,7 +827,11 @@ def post_list_channels(self, response: eventarc.ListChannelsResponse) -> eventar """ return response - def post_list_channels_with_metadata(self, response: eventarc.ListChannelsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListChannelsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_list_channels_with_metadata( + self, + response: eventarc.ListChannelsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.ListChannelsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for list_channels Override in a subclass to read or manipulate the response or metadata after it @@ -705,7 +846,11 @@ def post_list_channels_with_metadata(self, response: eventarc.ListChannelsRespon """ return response, metadata - def pre_list_providers(self, request: eventarc.ListProvidersRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListProvidersRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_list_providers( + self, + request: eventarc.ListProvidersRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.ListProvidersRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_providers Override in a subclass to manipulate the request or metadata @@ -713,7 +858,9 @@ 
def pre_list_providers(self, request: eventarc.ListProvidersRequest, metadata: S """ return request, metadata - def post_list_providers(self, response: eventarc.ListProvidersResponse) -> eventarc.ListProvidersResponse: + def post_list_providers( + self, response: eventarc.ListProvidersResponse + ) -> eventarc.ListProvidersResponse: """Post-rpc interceptor for list_providers DEPRECATED. Please use the `post_list_providers_with_metadata` @@ -726,7 +873,11 @@ def post_list_providers(self, response: eventarc.ListProvidersResponse) -> event """ return response - def post_list_providers_with_metadata(self, response: eventarc.ListProvidersResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListProvidersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_list_providers_with_metadata( + self, + response: eventarc.ListProvidersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.ListProvidersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for list_providers Override in a subclass to read or manipulate the response or metadata after it @@ -741,7 +892,11 @@ def post_list_providers_with_metadata(self, response: eventarc.ListProvidersResp """ return response, metadata - def pre_list_triggers(self, request: eventarc.ListTriggersRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListTriggersRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_list_triggers( + self, + request: eventarc.ListTriggersRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.ListTriggersRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_triggers Override in a subclass to manipulate the request or metadata @@ -749,7 +904,9 @@ def pre_list_triggers(self, request: eventarc.ListTriggersRequest, metadata: Seq """ return request, metadata - def post_list_triggers(self, response: eventarc.ListTriggersResponse) -> 
eventarc.ListTriggersResponse: + def post_list_triggers( + self, response: eventarc.ListTriggersResponse + ) -> eventarc.ListTriggersResponse: """Post-rpc interceptor for list_triggers DEPRECATED. Please use the `post_list_triggers_with_metadata` @@ -762,7 +919,11 @@ def post_list_triggers(self, response: eventarc.ListTriggersResponse) -> eventar """ return response - def post_list_triggers_with_metadata(self, response: eventarc.ListTriggersResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListTriggersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_list_triggers_with_metadata( + self, + response: eventarc.ListTriggersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.ListTriggersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for list_triggers Override in a subclass to read or manipulate the response or metadata after it @@ -777,7 +938,11 @@ def post_list_triggers_with_metadata(self, response: eventarc.ListTriggersRespon """ return response, metadata - def pre_update_channel(self, request: eventarc.UpdateChannelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.UpdateChannelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_update_channel( + self, + request: eventarc.UpdateChannelRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.UpdateChannelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_channel Override in a subclass to manipulate the request or metadata @@ -785,7 +950,9 @@ def pre_update_channel(self, request: eventarc.UpdateChannelRequest, metadata: S """ return request, metadata - def post_update_channel(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_update_channel( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for update_channel DEPRECATED. 
Please use the `post_update_channel_with_metadata` @@ -798,7 +965,11 @@ def post_update_channel(self, response: operations_pb2.Operation) -> operations_ """ return response - def post_update_channel_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_update_channel_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for update_channel Override in a subclass to read or manipulate the response or metadata after it @@ -813,7 +984,14 @@ def post_update_channel_with_metadata(self, response: operations_pb2.Operation, """ return response, metadata - def pre_update_google_channel_config(self, request: eventarc.UpdateGoogleChannelConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.UpdateGoogleChannelConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_update_google_channel_config( + self, + request: eventarc.UpdateGoogleChannelConfigRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + eventarc.UpdateGoogleChannelConfigRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for update_google_channel_config Override in a subclass to manipulate the request or metadata @@ -821,7 +999,9 @@ def pre_update_google_channel_config(self, request: eventarc.UpdateGoogleChannel """ return request, metadata - def post_update_google_channel_config(self, response: gce_google_channel_config.GoogleChannelConfig) -> gce_google_channel_config.GoogleChannelConfig: + def post_update_google_channel_config( + self, response: gce_google_channel_config.GoogleChannelConfig + ) -> gce_google_channel_config.GoogleChannelConfig: """Post-rpc interceptor for update_google_channel_config DEPRECATED. 
Please use the `post_update_google_channel_config_with_metadata` @@ -834,7 +1014,14 @@ def post_update_google_channel_config(self, response: gce_google_channel_config. """ return response - def post_update_google_channel_config_with_metadata(self, response: gce_google_channel_config.GoogleChannelConfig, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gce_google_channel_config.GoogleChannelConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_update_google_channel_config_with_metadata( + self, + response: gce_google_channel_config.GoogleChannelConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gce_google_channel_config.GoogleChannelConfig, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Post-rpc interceptor for update_google_channel_config Override in a subclass to read or manipulate the response or metadata after it @@ -849,7 +1036,11 @@ def post_update_google_channel_config_with_metadata(self, response: gce_google_c """ return response, metadata - def pre_update_trigger(self, request: eventarc.UpdateTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.UpdateTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_update_trigger( + self, + request: eventarc.UpdateTriggerRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.UpdateTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_trigger Override in a subclass to manipulate the request or metadata @@ -857,7 +1048,9 @@ def pre_update_trigger(self, request: eventarc.UpdateTriggerRequest, metadata: S """ return request, metadata - def post_update_trigger(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_update_trigger( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for update_trigger DEPRECATED. 
Please use the `post_update_trigger_with_metadata` @@ -870,7 +1063,11 @@ def post_update_trigger(self, response: operations_pb2.Operation) -> operations_ """ return response - def post_update_trigger_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_update_trigger_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for update_trigger Override in a subclass to read or manipulate the response or metadata after it @@ -886,8 +1083,12 @@ def post_update_trigger_with_metadata(self, response: operations_pb2.Operation, return response, metadata def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_location Override in a subclass to manipulate the request or metadata @@ -907,8 +1108,12 @@ def post_get_location( return response def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_locations Override in a subclass to manipulate the request or metadata @@ -928,8 +1133,12 @@ def 
post_list_locations( return response def pre_get_iam_policy( - self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_iam_policy Override in a subclass to manipulate the request or metadata @@ -937,9 +1146,7 @@ def pre_get_iam_policy( """ return request, metadata - def post_get_iam_policy( - self, response: policy_pb2.Policy - ) -> policy_pb2.Policy: + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: """Post-rpc interceptor for get_iam_policy Override in a subclass to manipulate the response @@ -949,8 +1156,12 @@ def post_get_iam_policy( return response def pre_set_iam_policy( - self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for set_iam_policy Override in a subclass to manipulate the request or metadata @@ -958,9 +1169,7 @@ def pre_set_iam_policy( """ return request, metadata - def post_set_iam_policy( - self, response: policy_pb2.Policy - ) -> policy_pb2.Policy: + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: """Post-rpc interceptor for set_iam_policy Override in a subclass to manipulate the response @@ -970,8 +1179,13 @@ def post_set_iam_policy( return response def pre_test_iam_permissions( - self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: 
Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.TestIamPermissionsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for test_iam_permissions Override in a subclass to manipulate the request or metadata @@ -991,8 +1205,12 @@ def post_test_iam_permissions( return response def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for cancel_operation Override in a subclass to manipulate the request or metadata @@ -1000,9 +1218,7 @@ def pre_cancel_operation( """ return request, metadata - def post_cancel_operation( - self, response: None - ) -> None: + def post_cancel_operation(self, response: None) -> None: """Post-rpc interceptor for cancel_operation Override in a subclass to manipulate the response @@ -1012,8 +1228,12 @@ def post_cancel_operation( return response def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_operation Override in a subclass to manipulate the request or metadata @@ -1021,9 +1241,7 
@@ def pre_delete_operation( """ return request, metadata - def post_delete_operation( - self, response: None - ) -> None: + def post_delete_operation(self, response: None) -> None: """Post-rpc interceptor for delete_operation Override in a subclass to manipulate the response @@ -1033,8 +1251,12 @@ def post_delete_operation( return response def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata @@ -1054,8 +1276,12 @@ def post_get_operation( return response def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_operations Override in a subclass to manipulate the request or metadata @@ -1096,62 +1322,63 @@ class EventarcRestTransport(_BaseEventarcRestTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, - host: str = 'eventarc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo 
= DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[EventarcRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "eventarc.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[EventarcRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via a GitHub issue in - this library's repository. Thank you! - - Args: - host (Optional[str]): - The hostname to connect to (default: 'eventarc.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. This argument will be - removed in the next major version of this library. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. 
- quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - interceptor (Optional[EventarcRestInterceptor]): Interceptor used - to manipulate requests, request metadata, and responses. - api_audience (Optional[str]): The intended audience for the API calls - to the service that will be set when using certain 3rd party - authentication flows. Audience is typically a resource identifier. - If not set, the host value will be used as a default. + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to (default: 'eventarc.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. 
+ client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + interceptor (Optional[EventarcRestInterceptor]): Interceptor used + to manipulate requests, request metadata, and responses. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the host value will be used as a default. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -1163,10 +1390,11 @@ def __init__(self, *, client_info=client_info, always_use_jwt_access=always_use_jwt_access, url_scheme=url_scheme, - api_audience=api_audience + api_audience=api_audience, ) self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) + self._credentials, default_host=self.DEFAULT_HOST + ) self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) @@ -1183,47 +1411,52 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. 
if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ + "google.longrunning.Operations.CancelOperation": [ { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", }, ], - 'google.longrunning.Operations.DeleteOperation': [ + "google.longrunning.Operations.DeleteOperation": [ { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", }, ], - 'google.longrunning.Operations.GetOperation': [ + "google.longrunning.Operations.GetOperation": [ { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", }, ], - 'google.longrunning.Operations.ListOperations': [ + "google.longrunning.Operations.ListOperations": [ { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", }, ], } rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) # Return the client from cache. 
return self._operations_client - class _CreateChannel(_BaseEventarcRestTransport._BaseCreateChannel, EventarcRestStub): + class _CreateChannel( + _BaseEventarcRestTransport._BaseCreateChannel, EventarcRestStub + ): def __hash__(self): return hash("EventarcRestTransport.CreateChannel") @@ -1235,27 +1468,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: eventarc.CreateChannelRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: eventarc.CreateChannelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the create channel method over HTTP. 
Args: @@ -1278,32 +1513,48 @@ def __call__(self, """ - http_options = _BaseEventarcRestTransport._BaseCreateChannel._get_http_options() + http_options = ( + _BaseEventarcRestTransport._BaseCreateChannel._get_http_options() + ) request, metadata = self._interceptor.pre_create_channel(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseCreateChannel._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseEventarcRestTransport._BaseCreateChannel._get_transcoded_request( + http_options, request + ) + ) - body = _BaseEventarcRestTransport._BaseCreateChannel._get_request_body_json(transcoded_request) + body = _BaseEventarcRestTransport._BaseCreateChannel._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseCreateChannel._get_query_params_json(transcoded_request) + query_params = ( + _BaseEventarcRestTransport._BaseCreateChannel._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.CreateChannel", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "CreateChannel", "httpRequest": 
http_request, @@ -1312,7 +1563,15 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._CreateChannel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = EventarcRestTransport._CreateChannel._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1325,20 +1584,24 @@ def __call__(self, resp = self._interceptor.post_create_channel(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_channel_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_create_channel_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcClient.create_channel_", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "CreateChannel", "metadata": http_response["headers"], @@ -1347,7 +1610,9 @@ def __call__(self, ) return resp - class _CreateChannelConnection(_BaseEventarcRestTransport._BaseCreateChannelConnection, EventarcRestStub): + class _CreateChannelConnection( + _BaseEventarcRestTransport._BaseCreateChannelConnection, EventarcRestStub + ): def __hash__(self): return hash("EventarcRestTransport.CreateChannelConnection") @@ -1359,27 +1624,29 @@ def _get_response( session, timeout, 
transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: eventarc.CreateChannelConnectionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: eventarc.CreateChannelConnectionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the create channel connection method over HTTP. 
Args: @@ -1404,30 +1671,42 @@ def __call__(self, http_options = _BaseEventarcRestTransport._BaseCreateChannelConnection._get_http_options() - request, metadata = self._interceptor.pre_create_channel_connection(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseCreateChannelConnection._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_create_channel_connection( + request, metadata + ) + transcoded_request = _BaseEventarcRestTransport._BaseCreateChannelConnection._get_transcoded_request( + http_options, request + ) - body = _BaseEventarcRestTransport._BaseCreateChannelConnection._get_request_body_json(transcoded_request) + body = _BaseEventarcRestTransport._BaseCreateChannelConnection._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseCreateChannelConnection._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseEventarcRestTransport._BaseCreateChannelConnection._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.CreateChannelConnection", - extra = { + extra={ "serviceName": 
"google.cloud.eventarc.v1.Eventarc", "rpcName": "CreateChannelConnection", "httpRequest": http_request, @@ -1436,7 +1715,15 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._CreateChannelConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = EventarcRestTransport._CreateChannelConnection._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1449,20 +1736,24 @@ def __call__(self, resp = self._interceptor.post_create_channel_connection(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_channel_connection_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_create_channel_connection_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcClient.create_channel_connection", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "CreateChannelConnection", "metadata": http_response["headers"], @@ -1471,7 +1762,9 @@ def __call__(self, ) return resp - class _CreateTrigger(_BaseEventarcRestTransport._BaseCreateTrigger, EventarcRestStub): + class _CreateTrigger( + _BaseEventarcRestTransport._BaseCreateTrigger, EventarcRestStub + ): def __hash__(self): 
return hash("EventarcRestTransport.CreateTrigger") @@ -1483,27 +1776,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: eventarc.CreateTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: eventarc.CreateTriggerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the create trigger method over HTTP. 
Args: @@ -1526,32 +1821,48 @@ def __call__(self, """ - http_options = _BaseEventarcRestTransport._BaseCreateTrigger._get_http_options() + http_options = ( + _BaseEventarcRestTransport._BaseCreateTrigger._get_http_options() + ) request, metadata = self._interceptor.pre_create_trigger(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseCreateTrigger._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseEventarcRestTransport._BaseCreateTrigger._get_transcoded_request( + http_options, request + ) + ) - body = _BaseEventarcRestTransport._BaseCreateTrigger._get_request_body_json(transcoded_request) + body = _BaseEventarcRestTransport._BaseCreateTrigger._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseCreateTrigger._get_query_params_json(transcoded_request) + query_params = ( + _BaseEventarcRestTransport._BaseCreateTrigger._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.CreateTrigger", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "CreateTrigger", "httpRequest": 
http_request, @@ -1560,7 +1871,15 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._CreateTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = EventarcRestTransport._CreateTrigger._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1573,20 +1892,24 @@ def __call__(self, resp = self._interceptor.post_create_trigger(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_trigger_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_create_trigger_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcClient.create_trigger", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "CreateTrigger", "metadata": http_response["headers"], @@ -1595,7 +1918,9 @@ def __call__(self, ) return resp - class _DeleteChannel(_BaseEventarcRestTransport._BaseDeleteChannel, EventarcRestStub): + class _DeleteChannel( + _BaseEventarcRestTransport._BaseDeleteChannel, EventarcRestStub + ): def __hash__(self): return hash("EventarcRestTransport.DeleteChannel") @@ -1607,26 +1932,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = 
transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: eventarc.DeleteChannelRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: eventarc.DeleteChannelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the delete channel method over HTTP. 
Args: @@ -1649,30 +1976,44 @@ def __call__(self, """ - http_options = _BaseEventarcRestTransport._BaseDeleteChannel._get_http_options() + http_options = ( + _BaseEventarcRestTransport._BaseDeleteChannel._get_http_options() + ) request, metadata = self._interceptor.pre_delete_channel(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseDeleteChannel._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseEventarcRestTransport._BaseDeleteChannel._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseDeleteChannel._get_query_params_json(transcoded_request) + query_params = ( + _BaseEventarcRestTransport._BaseDeleteChannel._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteChannel", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "DeleteChannel", "httpRequest": http_request, @@ -1681,7 +2022,14 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._DeleteChannel._get_response(self._host, metadata, query_params, self._session, timeout, 
transcoded_request) + response = EventarcRestTransport._DeleteChannel._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1694,20 +2042,24 @@ def __call__(self, resp = self._interceptor.post_delete_channel(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_channel_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_delete_channel_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcClient.delete_channel", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "DeleteChannel", "metadata": http_response["headers"], @@ -1716,7 +2068,9 @@ def __call__(self, ) return resp - class _DeleteChannelConnection(_BaseEventarcRestTransport._BaseDeleteChannelConnection, EventarcRestStub): + class _DeleteChannelConnection( + _BaseEventarcRestTransport._BaseDeleteChannelConnection, EventarcRestStub + ): def __hash__(self): return hash("EventarcRestTransport.DeleteChannelConnection") @@ -1728,26 +2082,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = 
dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: eventarc.DeleteChannelConnectionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: eventarc.DeleteChannelConnectionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the delete channel connection method over HTTP. Args: @@ -1772,28 +2128,38 @@ def __call__(self, http_options = _BaseEventarcRestTransport._BaseDeleteChannelConnection._get_http_options() - request, metadata = self._interceptor.pre_delete_channel_connection(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseDeleteChannelConnection._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_delete_channel_connection( + request, metadata + ) + transcoded_request = _BaseEventarcRestTransport._BaseDeleteChannelConnection._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseDeleteChannelConnection._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseEventarcRestTransport._BaseDeleteChannelConnection._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + 
logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteChannelConnection", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "DeleteChannelConnection", "httpRequest": http_request, @@ -1802,7 +2168,14 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._DeleteChannelConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._DeleteChannelConnection._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1815,20 +2188,24 @@ def __call__(self, resp = self._interceptor.post_delete_channel_connection(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_channel_connection_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_delete_channel_connection_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcClient.delete_channel_connection", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "DeleteChannelConnection", "metadata": http_response["headers"], @@ -1837,7 +2214,9 @@ def __call__(self, ) return resp - class _DeleteTrigger(_BaseEventarcRestTransport._BaseDeleteTrigger, EventarcRestStub): + class _DeleteTrigger( + _BaseEventarcRestTransport._BaseDeleteTrigger, EventarcRestStub + ): def __hash__(self): return hash("EventarcRestTransport.DeleteTrigger") @@ -1849,26 +2228,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, 
strict=True), - ) + ) return response - def __call__(self, - request: eventarc.DeleteTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: eventarc.DeleteTriggerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the delete trigger method over HTTP. Args: @@ -1891,30 +2272,44 @@ def __call__(self, """ - http_options = _BaseEventarcRestTransport._BaseDeleteTrigger._get_http_options() + http_options = ( + _BaseEventarcRestTransport._BaseDeleteTrigger._get_http_options() + ) request, metadata = self._interceptor.pre_delete_trigger(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseDeleteTrigger._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseEventarcRestTransport._BaseDeleteTrigger._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseDeleteTrigger._get_query_params_json(transcoded_request) + query_params = ( + _BaseEventarcRestTransport._BaseDeleteTrigger._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, 
- "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteTrigger", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "DeleteTrigger", "httpRequest": http_request, @@ -1923,7 +2318,14 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._DeleteTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._DeleteTrigger._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1936,20 +2338,24 @@ def __call__(self, resp = self._interceptor.post_delete_trigger(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_trigger_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_delete_trigger_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcClient.delete_trigger", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "DeleteTrigger", "metadata": http_response["headers"], @@ -1970,26 +2376,28 @@ def _get_response( session, timeout, transcoded_request, 
- body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: eventarc.GetChannelRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> channel.Channel: + def __call__( + self, + request: eventarc.GetChannelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> channel.Channel: r"""Call the get channel method over HTTP. 
Args: @@ -2017,30 +2425,44 @@ def __call__(self, """ - http_options = _BaseEventarcRestTransport._BaseGetChannel._get_http_options() + http_options = ( + _BaseEventarcRestTransport._BaseGetChannel._get_http_options() + ) request, metadata = self._interceptor.pre_get_channel(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseGetChannel._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseEventarcRestTransport._BaseGetChannel._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseGetChannel._get_query_params_json(transcoded_request) + query_params = ( + _BaseEventarcRestTransport._BaseGetChannel._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetChannel", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetChannel", "httpRequest": http_request, @@ -2049,7 +2471,14 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._GetChannel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = 
EventarcRestTransport._GetChannel._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2064,20 +2493,24 @@ def __call__(self, resp = self._interceptor.post_get_channel(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_channel_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_get_channel_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = channel.Channel.to_json(response) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcClient.get_channel", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetChannel", "metadata": http_response["headers"], @@ -2086,7 +2519,9 @@ def __call__(self, ) return resp - class _GetChannelConnection(_BaseEventarcRestTransport._BaseGetChannelConnection, EventarcRestStub): + class _GetChannelConnection( + _BaseEventarcRestTransport._BaseGetChannelConnection, EventarcRestStub + ): def __hash__(self): return hash("EventarcRestTransport.GetChannelConnection") @@ -2098,26 +2533,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + 
headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: eventarc.GetChannelConnectionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> channel_connection.ChannelConnection: + def __call__( + self, + request: eventarc.GetChannelConnectionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> channel_connection.ChannelConnection: r"""Call the get channel connection method over HTTP. Args: @@ -2144,30 +2581,42 @@ def __call__(self, """ - http_options = _BaseEventarcRestTransport._BaseGetChannelConnection._get_http_options() + http_options = ( + _BaseEventarcRestTransport._BaseGetChannelConnection._get_http_options() + ) - request, metadata = self._interceptor.pre_get_channel_connection(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseGetChannelConnection._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_get_channel_connection( + request, metadata + ) + transcoded_request = _BaseEventarcRestTransport._BaseGetChannelConnection._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseGetChannelConnection._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseEventarcRestTransport._BaseGetChannelConnection._get_query_params_json( + transcoded_request + ) + + if 
CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetChannelConnection", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetChannelConnection", "httpRequest": http_request, @@ -2176,7 +2625,14 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._GetChannelConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._GetChannelConnection._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2191,20 +2647,26 @@ def __call__(self, resp = self._interceptor.post_get_channel_connection(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_channel_connection_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_get_channel_connection_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = channel_connection.ChannelConnection.to_json(response) + response_payload = channel_connection.ChannelConnection.to_json( + response + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcClient.get_channel_connection", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetChannelConnection", "metadata": http_response["headers"], @@ -2213,7 +2675,9 @@ def __call__(self, ) return resp - class _GetGoogleChannelConfig(_BaseEventarcRestTransport._BaseGetGoogleChannelConfig, EventarcRestStub): + class _GetGoogleChannelConfig( + _BaseEventarcRestTransport._BaseGetGoogleChannelConfig, EventarcRestStub + ): def __hash__(self): return hash("EventarcRestTransport.GetGoogleChannelConfig") @@ -2225,26 +2689,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, 
method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: eventarc.GetGoogleChannelConfigRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> google_channel_config.GoogleChannelConfig: + def __call__( + self, + request: eventarc.GetGoogleChannelConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> google_channel_config.GoogleChannelConfig: r"""Call the get google channel config method over HTTP. Args: @@ -2274,28 +2740,38 @@ def __call__(self, http_options = _BaseEventarcRestTransport._BaseGetGoogleChannelConfig._get_http_options() - request, metadata = self._interceptor.pre_get_google_channel_config(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseGetGoogleChannelConfig._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_get_google_channel_config( + request, metadata + ) + transcoded_request = _BaseEventarcRestTransport._BaseGetGoogleChannelConfig._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseGetGoogleChannelConfig._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseEventarcRestTransport._BaseGetGoogleChannelConfig._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, 
uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetGoogleChannelConfig", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetGoogleChannelConfig", "httpRequest": http_request, @@ -2304,7 +2780,14 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._GetGoogleChannelConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._GetGoogleChannelConfig._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2319,20 +2802,26 @@ def __call__(self, resp = self._interceptor.post_get_google_channel_config(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_google_channel_config_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_get_google_channel_config_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = google_channel_config.GoogleChannelConfig.to_json(response) + response_payload = ( + google_channel_config.GoogleChannelConfig.to_json(response) + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcClient.get_google_channel_config", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetGoogleChannelConfig", "metadata": http_response["headers"], @@ -2353,26 +2842,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: eventarc.GetProviderRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> discovery.Provider: + def __call__( + self, + request: eventarc.GetProviderRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> discovery.Provider: r"""Call the get provider method over HTTP. Args: @@ -2394,30 +2885,44 @@ def __call__(self, """ - http_options = _BaseEventarcRestTransport._BaseGetProvider._get_http_options() + http_options = ( + _BaseEventarcRestTransport._BaseGetProvider._get_http_options() + ) request, metadata = self._interceptor.pre_get_provider(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseGetProvider._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseEventarcRestTransport._BaseGetProvider._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseGetProvider._get_query_params_json(transcoded_request) + query_params = ( + _BaseEventarcRestTransport._BaseGetProvider._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for 
google.cloud.eventarc_v1.EventarcClient.GetProvider", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetProvider", "httpRequest": http_request, @@ -2426,7 +2931,14 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._GetProvider._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._GetProvider._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2441,20 +2953,24 @@ def __call__(self, resp = self._interceptor.post_get_provider(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_provider_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_get_provider_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = discovery.Provider.to_json(response) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcClient.get_provider", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetProvider", "metadata": http_response["headers"], @@ -2475,26 +2991,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = 
dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: eventarc.GetTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> trigger.Trigger: + def __call__( + self, + request: eventarc.GetTriggerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> trigger.Trigger: r"""Call the get trigger method over HTTP. Args: @@ -2516,30 +3034,44 @@ def __call__(self, """ - http_options = _BaseEventarcRestTransport._BaseGetTrigger._get_http_options() + http_options = ( + _BaseEventarcRestTransport._BaseGetTrigger._get_http_options() + ) request, metadata = self._interceptor.pre_get_trigger(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseGetTrigger._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseEventarcRestTransport._BaseGetTrigger._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseGetTrigger._get_query_params_json(transcoded_request) + query_params = ( + _BaseEventarcRestTransport._BaseGetTrigger._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) 
+ method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetTrigger", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetTrigger", "httpRequest": http_request, @@ -2548,7 +3080,14 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._GetTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._GetTrigger._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2563,20 +3102,24 @@ def __call__(self, resp = self._interceptor.post_get_trigger(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_trigger_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_get_trigger_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = trigger.Trigger.to_json(response) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcClient.get_trigger", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetTrigger", "metadata": http_response["headers"], @@ -2585,7 +3128,9 @@ def __call__(self, ) return resp - class _ListChannelConnections(_BaseEventarcRestTransport._BaseListChannelConnections, EventarcRestStub): + class _ListChannelConnections( + _BaseEventarcRestTransport._BaseListChannelConnections, EventarcRestStub + ): def __hash__(self): return hash("EventarcRestTransport.ListChannelConnections") @@ -2597,26 +3142,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) 
return response - def __call__(self, - request: eventarc.ListChannelConnectionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> eventarc.ListChannelConnectionsResponse: + def __call__( + self, + request: eventarc.ListChannelConnectionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> eventarc.ListChannelConnectionsResponse: r"""Call the list channel connections method over HTTP. Args: @@ -2640,28 +3187,38 @@ def __call__(self, http_options = _BaseEventarcRestTransport._BaseListChannelConnections._get_http_options() - request, metadata = self._interceptor.pre_list_channel_connections(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseListChannelConnections._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_list_channel_connections( + request, metadata + ) + transcoded_request = _BaseEventarcRestTransport._BaseListChannelConnections._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseListChannelConnections._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseEventarcRestTransport._BaseListChannelConnections._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - 
"payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListChannelConnections", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "ListChannelConnections", "httpRequest": http_request, @@ -2670,7 +3227,14 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._ListChannelConnections._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._ListChannelConnections._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2685,20 +3249,26 @@ def __call__(self, resp = self._interceptor.post_list_channel_connections(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_channel_connections_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_list_channel_connections_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = eventarc.ListChannelConnectionsResponse.to_json(response) + response_payload = eventarc.ListChannelConnectionsResponse.to_json( + response + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for 
google.cloud.eventarc_v1.EventarcClient.list_channel_connections", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "ListChannelConnections", "metadata": http_response["headers"], @@ -2719,26 +3289,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: eventarc.ListChannelsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> eventarc.ListChannelsResponse: + def __call__( + self, + request: eventarc.ListChannelsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> eventarc.ListChannelsResponse: r"""Call the list channels method over HTTP. Args: @@ -2758,30 +3330,44 @@ def __call__(self, The response message for the ``ListChannels`` method. 
""" - http_options = _BaseEventarcRestTransport._BaseListChannels._get_http_options() + http_options = ( + _BaseEventarcRestTransport._BaseListChannels._get_http_options() + ) request, metadata = self._interceptor.pre_list_channels(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseListChannels._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseEventarcRestTransport._BaseListChannels._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseListChannels._get_query_params_json(transcoded_request) + query_params = ( + _BaseEventarcRestTransport._BaseListChannels._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListChannels", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "ListChannels", "httpRequest": http_request, @@ -2790,7 +3376,14 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._ListChannels._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = 
EventarcRestTransport._ListChannels._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2805,20 +3398,24 @@ def __call__(self, resp = self._interceptor.post_list_channels(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_channels_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_list_channels_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = eventarc.ListChannelsResponse.to_json(response) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcClient.list_channels", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "ListChannels", "metadata": http_response["headers"], @@ -2827,7 +3424,9 @@ def __call__(self, ) return resp - class _ListProviders(_BaseEventarcRestTransport._BaseListProviders, EventarcRestStub): + class _ListProviders( + _BaseEventarcRestTransport._BaseListProviders, EventarcRestStub + ): def __hash__(self): return hash("EventarcRestTransport.ListProviders") @@ -2839,26 +3438,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + 
headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: eventarc.ListProvidersRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> eventarc.ListProvidersResponse: + def __call__( + self, + request: eventarc.ListProvidersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> eventarc.ListProvidersResponse: r"""Call the list providers method over HTTP. Args: @@ -2878,30 +3479,44 @@ def __call__(self, The response message for the ``ListProviders`` method. """ - http_options = _BaseEventarcRestTransport._BaseListProviders._get_http_options() + http_options = ( + _BaseEventarcRestTransport._BaseListProviders._get_http_options() + ) request, metadata = self._interceptor.pre_list_providers(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseListProviders._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseEventarcRestTransport._BaseListProviders._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseListProviders._get_query_params_json(transcoded_request) + query_params = ( + _BaseEventarcRestTransport._BaseListProviders._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + 
host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListProviders", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "ListProviders", "httpRequest": http_request, @@ -2910,7 +3525,14 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._ListProviders._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._ListProviders._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2925,20 +3547,24 @@ def __call__(self, resp = self._interceptor.post_list_providers(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_providers_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_list_providers_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = eventarc.ListProvidersResponse.to_json(response) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcClient.list_providers", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "ListProviders", "metadata": http_response["headers"], @@ -2959,26 +3585,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: eventarc.ListTriggersRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> eventarc.ListTriggersResponse: + def __call__( + self, + request: eventarc.ListTriggersRequest, + *, + 
retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> eventarc.ListTriggersResponse: r"""Call the list triggers method over HTTP. Args: @@ -2998,30 +3626,44 @@ def __call__(self, The response message for the ``ListTriggers`` method. """ - http_options = _BaseEventarcRestTransport._BaseListTriggers._get_http_options() + http_options = ( + _BaseEventarcRestTransport._BaseListTriggers._get_http_options() + ) request, metadata = self._interceptor.pre_list_triggers(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseListTriggers._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseEventarcRestTransport._BaseListTriggers._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseListTriggers._get_query_params_json(transcoded_request) + query_params = ( + _BaseEventarcRestTransport._BaseListTriggers._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListTriggers", - extra = { + extra={ "serviceName": 
"google.cloud.eventarc.v1.Eventarc", "rpcName": "ListTriggers", "httpRequest": http_request, @@ -3030,7 +3672,14 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._ListTriggers._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._ListTriggers._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -3045,20 +3694,24 @@ def __call__(self, resp = self._interceptor.post_list_triggers(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_triggers_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_list_triggers_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = eventarc.ListTriggersResponse.to_json(response) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcClient.list_triggers", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "ListTriggers", "metadata": http_response["headers"], @@ -3067,7 +3720,9 @@ def __call__(self, ) return resp - class _UpdateChannel(_BaseEventarcRestTransport._BaseUpdateChannel, EventarcRestStub): + class _UpdateChannel( + _BaseEventarcRestTransport._BaseUpdateChannel, EventarcRestStub + ): def __hash__(self): return hash("EventarcRestTransport.UpdateChannel") @@ -3079,27 +3734,29 @@ def 
_get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: eventarc.UpdateChannelRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: eventarc.UpdateChannelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the update channel method over HTTP. 
Args: @@ -3122,32 +3779,48 @@ def __call__(self, """ - http_options = _BaseEventarcRestTransport._BaseUpdateChannel._get_http_options() + http_options = ( + _BaseEventarcRestTransport._BaseUpdateChannel._get_http_options() + ) request, metadata = self._interceptor.pre_update_channel(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseUpdateChannel._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseEventarcRestTransport._BaseUpdateChannel._get_transcoded_request( + http_options, request + ) + ) - body = _BaseEventarcRestTransport._BaseUpdateChannel._get_request_body_json(transcoded_request) + body = _BaseEventarcRestTransport._BaseUpdateChannel._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseUpdateChannel._get_query_params_json(transcoded_request) + query_params = ( + _BaseEventarcRestTransport._BaseUpdateChannel._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.UpdateChannel", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "UpdateChannel", "httpRequest": 
http_request, @@ -3156,7 +3829,15 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._UpdateChannel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = EventarcRestTransport._UpdateChannel._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -3169,20 +3850,24 @@ def __call__(self, resp = self._interceptor.post_update_channel(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_channel_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_update_channel_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcClient.update_channel", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "UpdateChannel", "metadata": http_response["headers"], @@ -3191,7 +3876,9 @@ def __call__(self, ) return resp - class _UpdateGoogleChannelConfig(_BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig, EventarcRestStub): + class _UpdateGoogleChannelConfig( + _BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig, EventarcRestStub + ): def __hash__(self): return hash("EventarcRestTransport.UpdateGoogleChannelConfig") @@ -3203,81 +3890,95 @@ def _get_response( session, 
timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: eventarc.UpdateGoogleChannelConfigRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> gce_google_channel_config.GoogleChannelConfig: + def __call__( + self, + request: eventarc.UpdateGoogleChannelConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gce_google_channel_config.GoogleChannelConfig: r"""Call the update google channel - config method over HTTP. - - Args: - request (~.eventarc.UpdateGoogleChannelConfigRequest): - The request object. The request message for the - UpdateGoogleChannelConfig method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.gce_google_channel_config.GoogleChannelConfig: - A GoogleChannelConfig is a resource - that stores the custom settings - respected by Eventarc first-party - triggers in the matching region. 
Once - configured, first-party event data will - be protected using the specified custom - managed encryption key instead of - Google-managed encryption keys. + config method over HTTP. + + Args: + request (~.eventarc.UpdateGoogleChannelConfigRequest): + The request object. The request message for the + UpdateGoogleChannelConfig method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gce_google_channel_config.GoogleChannelConfig: + A GoogleChannelConfig is a resource + that stores the custom settings + respected by Eventarc first-party + triggers in the matching region. Once + configured, first-party event data will + be protected using the specified custom + managed encryption key instead of + Google-managed encryption keys. 
""" http_options = _BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_http_options() - request, metadata = self._interceptor.pre_update_google_channel_config(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_update_google_channel_config( + request, metadata + ) + transcoded_request = _BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_transcoded_request( + http_options, request + ) - body = _BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_request_body_json(transcoded_request) + body = _BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.UpdateGoogleChannelConfig", - extra = { + extra={ "serviceName": 
"google.cloud.eventarc.v1.Eventarc", "rpcName": "UpdateGoogleChannelConfig", "httpRequest": http_request, @@ -3286,7 +3987,15 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._UpdateGoogleChannelConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = EventarcRestTransport._UpdateGoogleChannelConfig._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -3301,20 +4010,26 @@ def __call__(self, resp = self._interceptor.post_update_google_channel_config(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_google_channel_config_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_update_google_channel_config_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = gce_google_channel_config.GoogleChannelConfig.to_json(response) + response_payload = ( + gce_google_channel_config.GoogleChannelConfig.to_json(response) + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcClient.update_google_channel_config", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "UpdateGoogleChannelConfig", "metadata": http_response["headers"], @@ -3323,7 +4038,9 @@ def __call__(self, ) return resp - class 
_UpdateTrigger(_BaseEventarcRestTransport._BaseUpdateTrigger, EventarcRestStub): + class _UpdateTrigger( + _BaseEventarcRestTransport._BaseUpdateTrigger, EventarcRestStub + ): def __hash__(self): return hash("EventarcRestTransport.UpdateTrigger") @@ -3335,27 +4052,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: eventarc.UpdateTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: eventarc.UpdateTriggerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the update trigger method over HTTP. 
Args: @@ -3378,32 +4097,48 @@ def __call__(self, """ - http_options = _BaseEventarcRestTransport._BaseUpdateTrigger._get_http_options() + http_options = ( + _BaseEventarcRestTransport._BaseUpdateTrigger._get_http_options() + ) request, metadata = self._interceptor.pre_update_trigger(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseUpdateTrigger._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseEventarcRestTransport._BaseUpdateTrigger._get_transcoded_request( + http_options, request + ) + ) - body = _BaseEventarcRestTransport._BaseUpdateTrigger._get_request_body_json(transcoded_request) + body = _BaseEventarcRestTransport._BaseUpdateTrigger._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseUpdateTrigger._get_query_params_json(transcoded_request) + query_params = ( + _BaseEventarcRestTransport._BaseUpdateTrigger._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.UpdateTrigger", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "UpdateTrigger", "httpRequest": 
http_request, @@ -3412,7 +4147,15 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._UpdateTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = EventarcRestTransport._UpdateTrigger._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -3425,20 +4168,24 @@ def __call__(self, resp = self._interceptor.post_update_trigger(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_trigger_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_update_trigger_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcClient.update_trigger", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "UpdateTrigger", "metadata": http_response["headers"], @@ -3448,152 +4195,169 @@ def __call__(self, return resp @property - def create_channel_(self) -> Callable[ - [eventarc.CreateChannelRequest], - operations_pb2.Operation]: + def create_channel_( + self, + ) -> Callable[[eventarc.CreateChannelRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._CreateChannel(self._session, self._host, self._interceptor) # type: ignore + return self._CreateChannel(self._session, self._host, self._interceptor) # type: ignore @property - def create_channel_connection(self) -> Callable[ - [eventarc.CreateChannelConnectionRequest], - operations_pb2.Operation]: + def create_channel_connection( + self, + ) -> Callable[[eventarc.CreateChannelConnectionRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateChannelConnection(self._session, self._host, self._interceptor) # type: ignore + return self._CreateChannelConnection( + self._session, self._host, self._interceptor + ) # type: ignore @property - def create_trigger(self) -> Callable[ - [eventarc.CreateTriggerRequest], - operations_pb2.Operation]: + def create_trigger( + self, + ) -> Callable[[eventarc.CreateTriggerRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateTrigger(self._session, self._host, self._interceptor) # type: ignore + return self._CreateTrigger(self._session, self._host, self._interceptor) # type: ignore @property - def delete_channel(self) -> Callable[ - [eventarc.DeleteChannelRequest], - operations_pb2.Operation]: + def delete_channel( + self, + ) -> Callable[[eventarc.DeleteChannelRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._DeleteChannel(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteChannel(self._session, self._host, self._interceptor) # type: ignore @property - def delete_channel_connection(self) -> Callable[ - [eventarc.DeleteChannelConnectionRequest], - operations_pb2.Operation]: + def delete_channel_connection( + self, + ) -> Callable[[eventarc.DeleteChannelConnectionRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteChannelConnection(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteChannelConnection( + self._session, self._host, self._interceptor + ) # type: ignore @property - def delete_trigger(self) -> Callable[ - [eventarc.DeleteTriggerRequest], - operations_pb2.Operation]: + def delete_trigger( + self, + ) -> Callable[[eventarc.DeleteTriggerRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteTrigger(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteTrigger(self._session, self._host, self._interceptor) # type: ignore @property - def get_channel(self) -> Callable[ - [eventarc.GetChannelRequest], - channel.Channel]: + def get_channel(self) -> Callable[[eventarc.GetChannelRequest], channel.Channel]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._GetChannel(self._session, self._host, self._interceptor) # type: ignore + return self._GetChannel(self._session, self._host, self._interceptor) # type: ignore @property - def get_channel_connection(self) -> Callable[ - [eventarc.GetChannelConnectionRequest], - channel_connection.ChannelConnection]: + def get_channel_connection( + self, + ) -> Callable[ + [eventarc.GetChannelConnectionRequest], channel_connection.ChannelConnection + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetChannelConnection(self._session, self._host, self._interceptor) # type: ignore + return self._GetChannelConnection(self._session, self._host, self._interceptor) # type: ignore @property - def get_google_channel_config(self) -> Callable[ - [eventarc.GetGoogleChannelConfigRequest], - google_channel_config.GoogleChannelConfig]: + def get_google_channel_config( + self, + ) -> Callable[ + [eventarc.GetGoogleChannelConfigRequest], + google_channel_config.GoogleChannelConfig, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetGoogleChannelConfig(self._session, self._host, self._interceptor) # type: ignore + return self._GetGoogleChannelConfig( + self._session, self._host, self._interceptor + ) # type: ignore @property - def get_provider(self) -> Callable[ - [eventarc.GetProviderRequest], - discovery.Provider]: + def get_provider( + self, + ) -> Callable[[eventarc.GetProviderRequest], discovery.Provider]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._GetProvider(self._session, self._host, self._interceptor) # type: ignore + return self._GetProvider(self._session, self._host, self._interceptor) # type: ignore @property - def get_trigger(self) -> Callable[ - [eventarc.GetTriggerRequest], - trigger.Trigger]: + def get_trigger(self) -> Callable[[eventarc.GetTriggerRequest], trigger.Trigger]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetTrigger(self._session, self._host, self._interceptor) # type: ignore + return self._GetTrigger(self._session, self._host, self._interceptor) # type: ignore @property - def list_channel_connections(self) -> Callable[ - [eventarc.ListChannelConnectionsRequest], - eventarc.ListChannelConnectionsResponse]: + def list_channel_connections( + self, + ) -> Callable[ + [eventarc.ListChannelConnectionsRequest], + eventarc.ListChannelConnectionsResponse, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListChannelConnections(self._session, self._host, self._interceptor) # type: ignore + return self._ListChannelConnections( + self._session, self._host, self._interceptor + ) # type: ignore @property - def list_channels(self) -> Callable[ - [eventarc.ListChannelsRequest], - eventarc.ListChannelsResponse]: + def list_channels( + self, + ) -> Callable[[eventarc.ListChannelsRequest], eventarc.ListChannelsResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._ListChannels(self._session, self._host, self._interceptor) # type: ignore + return self._ListChannels(self._session, self._host, self._interceptor) # type: ignore @property - def list_providers(self) -> Callable[ - [eventarc.ListProvidersRequest], - eventarc.ListProvidersResponse]: + def list_providers( + self, + ) -> Callable[[eventarc.ListProvidersRequest], eventarc.ListProvidersResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListProviders(self._session, self._host, self._interceptor) # type: ignore + return self._ListProviders(self._session, self._host, self._interceptor) # type: ignore @property - def list_triggers(self) -> Callable[ - [eventarc.ListTriggersRequest], - eventarc.ListTriggersResponse]: + def list_triggers( + self, + ) -> Callable[[eventarc.ListTriggersRequest], eventarc.ListTriggersResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListTriggers(self._session, self._host, self._interceptor) # type: ignore + return self._ListTriggers(self._session, self._host, self._interceptor) # type: ignore @property - def update_channel(self) -> Callable[ - [eventarc.UpdateChannelRequest], - operations_pb2.Operation]: + def update_channel( + self, + ) -> Callable[[eventarc.UpdateChannelRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._UpdateChannel(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateChannel(self._session, self._host, self._interceptor) # type: ignore @property - def update_google_channel_config(self) -> Callable[ - [eventarc.UpdateGoogleChannelConfigRequest], - gce_google_channel_config.GoogleChannelConfig]: + def update_google_channel_config( + self, + ) -> Callable[ + [eventarc.UpdateGoogleChannelConfigRequest], + gce_google_channel_config.GoogleChannelConfig, + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._UpdateGoogleChannelConfig(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateGoogleChannelConfig( + self._session, self._host, self._interceptor + ) # type: ignore @property - def update_trigger(self) -> Callable[ - [eventarc.UpdateTriggerRequest], - operations_pb2.Operation]: + def update_trigger( + self, + ) -> Callable[[eventarc.UpdateTriggerRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._UpdateTrigger(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateTrigger(self._session, self._host, self._interceptor) # type: ignore @property def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore class _GetLocation(_BaseEventarcRestTransport._BaseGetLocation, EventarcRestStub): def __hash__(self): @@ -3607,27 +4371,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.Location: - + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: r"""Call the get location method over HTTP. Args: @@ -3645,30 +4410,44 @@ def __call__(self, locations_pb2.Location: Response from GetLocation method. 
""" - http_options = _BaseEventarcRestTransport._BaseGetLocation._get_http_options() + http_options = ( + _BaseEventarcRestTransport._BaseGetLocation._get_http_options() + ) request, metadata = self._interceptor.pre_get_location(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseEventarcRestTransport._BaseGetLocation._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) + query_params = ( + _BaseEventarcRestTransport._BaseGetLocation._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetLocation", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetLocation", "httpRequest": http_request, @@ -3677,7 +4456,14 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = 
EventarcRestTransport._GetLocation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -3688,19 +4474,21 @@ def __call__(self, resp = locations_pb2.Location() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_location(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcAsyncClient.GetLocation", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetLocation", "httpResponse": http_response, @@ -3711,9 +4499,11 @@ def __call__(self, @property def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - class _ListLocations(_BaseEventarcRestTransport._BaseListLocations, EventarcRestStub): + class _ListLocations( + _BaseEventarcRestTransport._BaseListLocations, EventarcRestStub + ): def __hash__(self): return hash("EventarcRestTransport.ListLocations") @@ -3725,27 +4515,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( 
"{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.ListLocationsResponse: - + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: r"""Call the list locations method over HTTP. Args: @@ -3763,30 +4554,44 @@ def __call__(self, locations_pb2.ListLocationsResponse: Response from ListLocations method. """ - http_options = _BaseEventarcRestTransport._BaseListLocations._get_http_options() + http_options = ( + _BaseEventarcRestTransport._BaseListLocations._get_http_options() + ) request, metadata = self._interceptor.pre_list_locations(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseListLocations._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseEventarcRestTransport._BaseListLocations._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseListLocations._get_query_params_json(transcoded_request) + query_params = ( + _BaseEventarcRestTransport._BaseListLocations._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, 
uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListLocations", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "ListLocations", "httpRequest": http_request, @@ -3795,7 +4600,14 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._ListLocations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -3806,19 +4618,21 @@ def __call__(self, resp = locations_pb2.ListLocationsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_locations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcAsyncClient.ListLocations", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "ListLocations", "httpResponse": http_response, @@ -3829,7 +4643,7 @@ def __call__(self, @property def get_iam_policy(self): - return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore class _GetIamPolicy(_BaseEventarcRestTransport._BaseGetIamPolicy, EventarcRestStub): def __hash__(self): @@ -3843,27 +4657,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: iam_policy_pb2.GetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) 
-> policy_pb2.Policy: - + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: r"""Call the get iam policy method over HTTP. Args: @@ -3881,30 +4696,44 @@ def __call__(self, policy_pb2.Policy: Response from GetIamPolicy method. """ - http_options = _BaseEventarcRestTransport._BaseGetIamPolicy._get_http_options() + http_options = ( + _BaseEventarcRestTransport._BaseGetIamPolicy._get_http_options() + ) request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseGetIamPolicy._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseEventarcRestTransport._BaseGetIamPolicy._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseGetIamPolicy._get_query_params_json(transcoded_request) + query_params = ( + _BaseEventarcRestTransport._BaseGetIamPolicy._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for 
google.cloud.eventarc_v1.EventarcClient.GetIamPolicy", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetIamPolicy", "httpRequest": http_request, @@ -3913,7 +4742,14 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._GetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._GetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -3924,19 +4760,21 @@ def __call__(self, resp = policy_pb2.Policy() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_iam_policy(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcAsyncClient.GetIamPolicy", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetIamPolicy", "httpResponse": http_response, @@ -3947,7 +4785,7 @@ def __call__(self, @property def set_iam_policy(self): - return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore class _SetIamPolicy(_BaseEventarcRestTransport._BaseSetIamPolicy, EventarcRestStub): def __hash__(self): @@ -3961,28 +4799,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = 
transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: iam_policy_pb2.SetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> policy_pb2.Policy: - + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: r"""Call the set iam policy method over HTTP. Args: @@ -4000,32 +4839,48 @@ def __call__(self, policy_pb2.Policy: Response from SetIamPolicy method. 
""" - http_options = _BaseEventarcRestTransport._BaseSetIamPolicy._get_http_options() + http_options = ( + _BaseEventarcRestTransport._BaseSetIamPolicy._get_http_options() + ) request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseSetIamPolicy._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseEventarcRestTransport._BaseSetIamPolicy._get_transcoded_request( + http_options, request + ) + ) - body = _BaseEventarcRestTransport._BaseSetIamPolicy._get_request_body_json(transcoded_request) + body = _BaseEventarcRestTransport._BaseSetIamPolicy._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseSetIamPolicy._get_query_params_json(transcoded_request) + query_params = ( + _BaseEventarcRestTransport._BaseSetIamPolicy._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.SetIamPolicy", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "SetIamPolicy", "httpRequest": http_request, @@ -4034,7 +4889,15 @@ def __call__(self, ) # Send 
the request - response = EventarcRestTransport._SetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = EventarcRestTransport._SetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -4045,19 +4908,21 @@ def __call__(self, resp = policy_pb2.Policy() resp = json_format.Parse(content, resp) resp = self._interceptor.post_set_iam_policy(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcAsyncClient.SetIamPolicy", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "SetIamPolicy", "httpResponse": http_response, @@ -4068,9 +4933,11 @@ def __call__(self, @property def test_iam_permissions(self): - return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore - class _TestIamPermissions(_BaseEventarcRestTransport._BaseTestIamPermissions, EventarcRestStub): + class _TestIamPermissions( + _BaseEventarcRestTransport._BaseTestIamPermissions, EventarcRestStub + ): def __hash__(self): return hash("EventarcRestTransport.TestIamPermissions") @@ -4082,28 +4949,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = 
transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: iam_policy_pb2.TestIamPermissionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Call the test iam permissions method over HTTP. Args: @@ -4121,32 +4989,46 @@ def __call__(self, iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. 
""" - http_options = _BaseEventarcRestTransport._BaseTestIamPermissions._get_http_options() + http_options = ( + _BaseEventarcRestTransport._BaseTestIamPermissions._get_http_options() + ) - request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseTestIamPermissions._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + transcoded_request = _BaseEventarcRestTransport._BaseTestIamPermissions._get_transcoded_request( + http_options, request + ) - body = _BaseEventarcRestTransport._BaseTestIamPermissions._get_request_body_json(transcoded_request) + body = _BaseEventarcRestTransport._BaseTestIamPermissions._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseTestIamPermissions._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseEventarcRestTransport._BaseTestIamPermissions._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.TestIamPermissions", - extra = { + extra={ "serviceName": 
"google.cloud.eventarc.v1.Eventarc", "rpcName": "TestIamPermissions", "httpRequest": http_request, @@ -4155,7 +5037,15 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._TestIamPermissions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = EventarcRestTransport._TestIamPermissions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -4166,19 +5056,21 @@ def __call__(self, resp = iam_policy_pb2.TestIamPermissionsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_test_iam_permissions(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcAsyncClient.TestIamPermissions", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "TestIamPermissions", "httpResponse": http_response, @@ -4189,9 +5081,11 @@ def __call__(self, @property def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - class _CancelOperation(_BaseEventarcRestTransport._BaseCancelOperation, EventarcRestStub): + class _CancelOperation( + _BaseEventarcRestTransport._BaseCancelOperation, EventarcRestStub + ): def __hash__(self): return hash("EventarcRestTransport.CancelOperation") @@ -4203,28 
+5097,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Call the cancel operation method over HTTP. Args: @@ -4239,32 +5134,52 @@ def __call__(self, be of type `bytes`. 
""" - http_options = _BaseEventarcRestTransport._BaseCancelOperation._get_http_options() + http_options = ( + _BaseEventarcRestTransport._BaseCancelOperation._get_http_options() + ) - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + transcoded_request = ( + _BaseEventarcRestTransport._BaseCancelOperation._get_transcoded_request( + http_options, request + ) + ) - body = _BaseEventarcRestTransport._BaseCancelOperation._get_request_body_json(transcoded_request) + body = ( + _BaseEventarcRestTransport._BaseCancelOperation._get_request_body_json( + transcoded_request + ) + ) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + query_params = ( + _BaseEventarcRestTransport._BaseCancelOperation._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.CancelOperation", - extra = { + extra={ "serviceName": 
"google.cloud.eventarc.v1.Eventarc", "rpcName": "CancelOperation", "httpRequest": http_request, @@ -4273,7 +5188,15 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = EventarcRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -4284,9 +5207,11 @@ def __call__(self, @property def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - class _DeleteOperation(_BaseEventarcRestTransport._BaseDeleteOperation, EventarcRestStub): + class _DeleteOperation( + _BaseEventarcRestTransport._BaseDeleteOperation, EventarcRestStub + ): def __hash__(self): return hash("EventarcRestTransport.DeleteOperation") @@ -4298,27 +5223,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + 
*, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Call the delete operation method over HTTP. Args: @@ -4333,30 +5259,46 @@ def __call__(self, be of type `bytes`. """ - http_options = _BaseEventarcRestTransport._BaseDeleteOperation._get_http_options() + http_options = ( + _BaseEventarcRestTransport._BaseDeleteOperation._get_http_options() + ) - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + transcoded_request = ( + _BaseEventarcRestTransport._BaseDeleteOperation._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + query_params = ( + _BaseEventarcRestTransport._BaseDeleteOperation._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for 
google.cloud.eventarc_v1.EventarcClient.DeleteOperation", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "DeleteOperation", "httpRequest": http_request, @@ -4365,7 +5307,14 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -4376,7 +5325,7 @@ def __call__(self, @property def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore class _GetOperation(_BaseEventarcRestTransport._BaseGetOperation, EventarcRestStub): def __hash__(self): @@ -4390,27 +5339,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: 
Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. Args: @@ -4428,30 +5378,44 @@ def __call__(self, operations_pb2.Operation: Response from GetOperation method. """ - http_options = _BaseEventarcRestTransport._BaseGetOperation._get_http_options() + http_options = ( + _BaseEventarcRestTransport._BaseGetOperation._get_http_options() + ) request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseEventarcRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + query_params = ( + _BaseEventarcRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetOperation", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetOperation", 
"httpRequest": http_request, @@ -4460,7 +5424,14 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -4471,19 +5442,21 @@ def __call__(self, resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcAsyncClient.GetOperation", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetOperation", "httpResponse": http_response, @@ -4494,9 +5467,11 @@ def __call__(self, @property def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - class _ListOperations(_BaseEventarcRestTransport._BaseListOperations, EventarcRestStub): + class _ListOperations( + _BaseEventarcRestTransport._BaseListOperations, EventarcRestStub + ): def __hash__(self): return hash("EventarcRestTransport.ListOperations") @@ -4508,27 +5483,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = 
transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: r"""Call the list operations method over HTTP. Args: @@ -4546,30 +5522,44 @@ def __call__(self, operations_pb2.ListOperationsResponse: Response from ListOperations method. 
""" - http_options = _BaseEventarcRestTransport._BaseListOperations._get_http_options() + http_options = ( + _BaseEventarcRestTransport._BaseListOperations._get_http_options() + ) request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseEventarcRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseEventarcRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseEventarcRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + query_params = ( + _BaseEventarcRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListOperations", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "ListOperations", "httpRequest": http_request, @@ -4578,7 +5568,14 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = 
EventarcRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -4589,19 +5586,21 @@ def __call__(self, resp = operations_pb2.ListOperationsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcAsyncClient.ListOperations", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "ListOperations", "httpResponse": http_response, @@ -4618,6 +5617,4 @@ def close(self): self._session.close() -__all__=( - 'EventarcRestTransport', -) +__all__ = ("EventarcRestTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py index 6ff0278064bd..328fa5f99564 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py @@ -14,27 +14,30 @@ # limitations under the License. 
# import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from .base import EventarcTransport, DEFAULT_CLIENT_INFO - import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - -from google.cloud.eventarc_v1.types import channel -from google.cloud.eventarc_v1.types import channel_connection -from google.cloud.eventarc_v1.types import discovery -from google.cloud.eventarc_v1.types import eventarc -from google.cloud.eventarc_v1.types import google_channel_config -from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config -from google.cloud.eventarc_v1.types import trigger +from google.api_core import gapic_v1, path_template +from google.cloud.eventarc_v1.types import ( + channel, + channel_connection, + discovery, + eventarc, + google_channel_config, + trigger, +) +from google.cloud.eventarc_v1.types import ( + google_channel_config as gce_google_channel_config, +) +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import ( + iam_policy_pb2, # type: ignore + policy_pb2, # type: ignore +) from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format + +from .base import DEFAULT_CLIENT_INFO, EventarcTransport class _BaseEventarcRestTransport(EventarcTransport): @@ -50,14 +53,16 @@ class _BaseEventarcRestTransport(EventarcTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, - host: str = 'eventarc.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: 
Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "eventarc.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): @@ -81,7 +86,9 @@ def __init__(self, *, # Run the base constructor maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER url_match_items = maybe_url_match.groupdict() @@ -92,27 +99,34 @@ def __init__(self, *, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience + api_audience=api_audience, ) class _BaseCreateChannel: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "channelId" : "", "validateOnly" : False, } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "channelId": "", + "validateOnly": False, + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/channels', - 'body': 'channel', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/channels", + "body": "channel", + }, ] return http_options @@ -127,17 +141,23 @@ def _get_request_body_json(transcoded_request): # Jsonify 
the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseEventarcRestTransport._BaseCreateChannel._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseEventarcRestTransport._BaseCreateChannel._get_unset_required_fields( + query_params + ) + ) return query_params @@ -145,20 +165,26 @@ class _BaseCreateChannelConnection: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "channelConnectionId" : "", } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "channelConnectionId": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/channelConnections', - 'body': 'channel_connection', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/channelConnections", + "body": "channel_connection", + }, ] return http_options @@ -173,17 +199,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], 
use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseEventarcRestTransport._BaseCreateChannelConnection._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseEventarcRestTransport._BaseCreateChannelConnection._get_unset_required_fields( + query_params + ) + ) return query_params @@ -191,20 +223,27 @@ class _BaseCreateTrigger: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "triggerId" : "", "validateOnly" : False, } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "triggerId": "", + "validateOnly": False, + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/triggers', - 'body': 'trigger', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/triggers", + "body": "trigger", + }, ] return http_options @@ -219,17 +258,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = 
json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseEventarcRestTransport._BaseCreateTrigger._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseEventarcRestTransport._BaseCreateTrigger._get_unset_required_fields( + query_params + ) + ) return query_params @@ -237,19 +282,25 @@ class _BaseDeleteChannel: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "validateOnly" : False, } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "validateOnly": False, + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/channels/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/channels/*}", + }, ] return http_options @@ -261,11 +312,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseEventarcRestTransport._BaseDeleteChannel._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + 
_BaseEventarcRestTransport._BaseDeleteChannel._get_unset_required_fields( + query_params + ) + ) return query_params @@ -273,19 +330,23 @@ class _BaseDeleteChannelConnection: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/channelConnections/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/channelConnections/*}", + }, ] return http_options @@ -297,11 +358,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseEventarcRestTransport._BaseDeleteChannelConnection._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseEventarcRestTransport._BaseDeleteChannelConnection._get_unset_required_fields( + query_params + ) + ) return query_params @@ -309,19 +376,25 @@ class _BaseDeleteTrigger: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "validateOnly" : False, } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "validateOnly": False, + } @classmethod def 
_get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/triggers/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/triggers/*}", + }, ] return http_options @@ -333,11 +406,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseEventarcRestTransport._BaseDeleteTrigger._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseEventarcRestTransport._BaseDeleteTrigger._get_unset_required_fields( + query_params + ) + ) return query_params @@ -345,19 +424,23 @@ class _BaseGetChannel: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/channels/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": 
"/v1/{name=projects/*/locations/*/channels/*}", + }, ] return http_options @@ -369,11 +452,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseEventarcRestTransport._BaseGetChannel._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseEventarcRestTransport._BaseGetChannel._get_unset_required_fields( + query_params + ) + ) return query_params @@ -381,19 +470,23 @@ class _BaseGetChannelConnection: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/channelConnections/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/channelConnections/*}", + }, ] return http_options @@ -405,11 +498,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseEventarcRestTransport._BaseGetChannelConnection._get_unset_required_fields(query_params)) + query_params = 
json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseEventarcRestTransport._BaseGetChannelConnection._get_unset_required_fields( + query_params + ) + ) return query_params @@ -417,19 +516,23 @@ class _BaseGetGoogleChannelConfig: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/googleChannelConfig}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/googleChannelConfig}", + }, ] return http_options @@ -441,11 +544,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseEventarcRestTransport._BaseGetGoogleChannelConfig._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseEventarcRestTransport._BaseGetGoogleChannelConfig._get_unset_required_fields( + query_params + ) + ) return query_params @@ -453,19 +562,23 @@ class _BaseGetProvider: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, 
Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/providers/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/providers/*}", + }, ] return http_options @@ -477,11 +590,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseEventarcRestTransport._BaseGetProvider._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseEventarcRestTransport._BaseGetProvider._get_unset_required_fields( + query_params + ) + ) return query_params @@ -489,19 +608,23 @@ class _BaseGetTrigger: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/triggers/*}', - }, + 
http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/triggers/*}", + }, ] return http_options @@ -513,11 +636,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseEventarcRestTransport._BaseGetTrigger._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseEventarcRestTransport._BaseGetTrigger._get_unset_required_fields( + query_params + ) + ) return query_params @@ -525,19 +654,23 @@ class _BaseListChannelConnections: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/channelConnections', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/channelConnections", + }, ] return http_options @@ -549,11 +682,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - 
query_params.update(_BaseEventarcRestTransport._BaseListChannelConnections._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseEventarcRestTransport._BaseListChannelConnections._get_unset_required_fields( + query_params + ) + ) return query_params @@ -561,19 +700,23 @@ class _BaseListChannels: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/channels', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/channels", + }, ] return http_options @@ -585,11 +728,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseEventarcRestTransport._BaseListChannels._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseEventarcRestTransport._BaseListChannels._get_unset_required_fields( + query_params + ) + ) return query_params @@ -597,19 +746,23 @@ class _BaseListProviders: def __hash__(self): # pragma: NO COVER return 
NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/providers', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/providers", + }, ] return http_options @@ -621,11 +774,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseEventarcRestTransport._BaseListProviders._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseEventarcRestTransport._BaseListProviders._get_unset_required_fields( + query_params + ) + ) return query_params @@ -633,19 +792,23 @@ class _BaseListTriggers: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: 
List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/triggers', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/triggers", + }, ] return http_options @@ -657,11 +820,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseEventarcRestTransport._BaseListTriggers._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseEventarcRestTransport._BaseListTriggers._get_unset_required_fields( + query_params + ) + ) return query_params @@ -669,20 +838,26 @@ class _BaseUpdateChannel: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "validateOnly" : False, } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "validateOnly": False, + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{channel.name=projects/*/locations/*/channels/*}', - 'body': 'channel', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{channel.name=projects/*/locations/*/channels/*}", + "body": "channel", + }, ] return http_options @@ -697,17 +872,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - 
transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseEventarcRestTransport._BaseUpdateChannel._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseEventarcRestTransport._BaseUpdateChannel._get_unset_required_fields( + query_params + ) + ) return query_params @@ -715,20 +896,24 @@ class _BaseUpdateGoogleChannelConfig: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{google_channel_config.name=projects/*/locations/*/googleChannelConfig}', - 'body': 'google_channel_config', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{google_channel_config.name=projects/*/locations/*/googleChannelConfig}", + "body": "google_channel_config", + }, ] return http_options @@ -743,17 +928,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def 
_get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_unset_required_fields( + query_params + ) + ) return query_params @@ -761,20 +952,26 @@ class _BaseUpdateTrigger: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "validateOnly" : False, } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "validateOnly": False, + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{trigger.name=projects/*/locations/*/triggers/*}', - 'body': 'trigger', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{trigger.name=projects/*/locations/*/triggers/*}", + "body": "trigger", + }, ] return http_options @@ -789,17 +986,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - 
)) - query_params.update(_BaseEventarcRestTransport._BaseUpdateTrigger._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseEventarcRestTransport._BaseUpdateTrigger._get_unset_required_fields( + query_params + ) + ) return query_params @@ -809,23 +1012,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseListLocations: @@ -834,23 +1037,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*}/locations', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def 
_get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseGetIamPolicy: @@ -859,31 +1062,31 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/triggers/*}:getIamPolicy', - }, - { - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/channels/*}:getIamPolicy', - }, - { - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/channelConnections/*}:getIamPolicy', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{resource=projects/*/locations/*/triggers/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/locations/*/channels/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/locations/*/channelConnections/*}:getIamPolicy", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseSetIamPolicy: @@ -892,38 +1095,39 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/triggers/*}:setIamPolicy', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/channels/*}:setIamPolicy', - 
'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/channelConnections/*}:setIamPolicy', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/triggers/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/channels/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/channelConnections/*}:setIamPolicy", + "body": "*", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) + body = json.dumps(transcoded_request["body"]) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseTestIamPermissions: @@ -932,38 +1136,39 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/triggers/*}:testIamPermissions', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/channels/*}:testIamPermissions', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/channelConnections/*}:testIamPermissions', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/triggers/*}:testIamPermissions", + "body": "*", + 
}, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/channels/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/channelConnections/*}:testIamPermissions", + "body": "*", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) + body = json.dumps(transcoded_request["body"]) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseCancelOperation: @@ -972,28 +1177,29 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) + body = json.dumps(transcoded_request["body"]) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = 
json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseDeleteOperation: @@ -1002,23 +1208,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseGetOperation: @@ -1027,23 +1233,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = 
json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseListOperations: @@ -1052,26 +1258,24 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params -__all__=( - '_BaseEventarcRestTransport', -) +__all__ = ("_BaseEventarcRestTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py index 0521e2f2f27d..375a58f3bc9e 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py @@ -53,10 +53,10 @@ GoogleChannelConfig, ) from .trigger import ( + GKE, CloudRun, Destination, EventFilter, - GKE, Pubsub, StateCondition, Transport, @@ -64,41 +64,41 @@ ) __all__ = ( - 'Channel', - 'ChannelConnection', - 'EventType', - 'FilteringAttribute', - 'Provider', - 'CreateChannelConnectionRequest', - 'CreateChannelRequest', - 'CreateTriggerRequest', - 'DeleteChannelConnectionRequest', - 
'DeleteChannelRequest', - 'DeleteTriggerRequest', - 'GetChannelConnectionRequest', - 'GetChannelRequest', - 'GetGoogleChannelConfigRequest', - 'GetProviderRequest', - 'GetTriggerRequest', - 'ListChannelConnectionsRequest', - 'ListChannelConnectionsResponse', - 'ListChannelsRequest', - 'ListChannelsResponse', - 'ListProvidersRequest', - 'ListProvidersResponse', - 'ListTriggersRequest', - 'ListTriggersResponse', - 'OperationMetadata', - 'UpdateChannelRequest', - 'UpdateGoogleChannelConfigRequest', - 'UpdateTriggerRequest', - 'GoogleChannelConfig', - 'CloudRun', - 'Destination', - 'EventFilter', - 'GKE', - 'Pubsub', - 'StateCondition', - 'Transport', - 'Trigger', + "Channel", + "ChannelConnection", + "EventType", + "FilteringAttribute", + "Provider", + "CreateChannelConnectionRequest", + "CreateChannelRequest", + "CreateTriggerRequest", + "DeleteChannelConnectionRequest", + "DeleteChannelRequest", + "DeleteTriggerRequest", + "GetChannelConnectionRequest", + "GetChannelRequest", + "GetGoogleChannelConfigRequest", + "GetProviderRequest", + "GetTriggerRequest", + "ListChannelConnectionsRequest", + "ListChannelConnectionsResponse", + "ListChannelsRequest", + "ListChannelsResponse", + "ListProvidersRequest", + "ListProvidersResponse", + "ListTriggersRequest", + "ListTriggersResponse", + "OperationMetadata", + "UpdateChannelRequest", + "UpdateGoogleChannelConfigRequest", + "UpdateTriggerRequest", + "GoogleChannelConfig", + "CloudRun", + "Destination", + "EventFilter", + "GKE", + "Pubsub", + "StateCondition", + "Transport", + "Trigger", ) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py index 4c1334291614..ce80ae5ab099 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py +++ 
b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py @@ -17,15 +17,13 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore - +import proto # type: ignore __protobuf__ = proto.module( - package='google.cloud.eventarc.v1', + package="google.cloud.eventarc.v1", manifest={ - 'Channel', + "Channel", }, ) @@ -79,6 +77,7 @@ class Channel(proto.Message): It must match the pattern ``projects/*/locations/*/keyRings/*/cryptoKeys/*``. """ + class State(proto.Enum): r"""State lists all the possible states of a Channel @@ -110,6 +109,7 @@ class State(proto.Enum): the subscriber should create a new Channel and give it to the provider. """ + STATE_UNSPECIFIED = 0 PENDING = 1 ACTIVE = 2 @@ -140,7 +140,7 @@ class State(proto.Enum): pubsub_topic: str = proto.Field( proto.STRING, number=8, - oneof='transport', + oneof="transport", ) state: State = proto.Field( proto.ENUM, diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel_connection.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel_connection.py index 64e741732d60..da457b8ef3f4 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel_connection.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel_connection.py @@ -17,15 +17,13 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore - +import proto # type: ignore __protobuf__ = proto.module( - package='google.cloud.eventarc.v1', + package="google.cloud.eventarc.v1", manifest={ - 'ChannelConnection', + "ChannelConnection", }, ) diff --git 
a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/discovery.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/discovery.py index 430bae6e41ae..f8b18c62bc50 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/discovery.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/discovery.py @@ -19,13 +19,12 @@ import proto # type: ignore - __protobuf__ = proto.module( - package='google.cloud.eventarc.v1', + package="google.cloud.eventarc.v1", manifest={ - 'Provider', - 'EventType', - 'FilteringAttribute', + "Provider", + "EventType", + "FilteringAttribute", }, ) @@ -53,10 +52,10 @@ class Provider(proto.Message): proto.STRING, number=2, ) - event_types: MutableSequence['EventType'] = proto.RepeatedField( + event_types: MutableSequence["EventType"] = proto.RepeatedField( proto.MESSAGE, number=3, - message='EventType', + message="EventType", ) @@ -95,10 +94,10 @@ class EventType(proto.Message): proto.STRING, number=2, ) - filtering_attributes: MutableSequence['FilteringAttribute'] = proto.RepeatedField( + filtering_attributes: MutableSequence["FilteringAttribute"] = proto.RepeatedField( proto.MESSAGE, number=3, - message='FilteringAttribute', + message="FilteringAttribute", ) event_schema_uri: str = proto.Field( proto.STRING, diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py index 2f6e988bc1ef..72aec785fc13 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py @@ -17,43 +17,43 @@ from typing import MutableMapping, MutableSequence +import 
google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore +import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore import proto # type: ignore - from google.cloud.eventarc_v1.types import channel as gce_channel from google.cloud.eventarc_v1.types import channel_connection as gce_channel_connection from google.cloud.eventarc_v1.types import discovery -from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config +from google.cloud.eventarc_v1.types import ( + google_channel_config as gce_google_channel_config, +) from google.cloud.eventarc_v1.types import trigger as gce_trigger -import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore -import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore - __protobuf__ = proto.module( - package='google.cloud.eventarc.v1', + package="google.cloud.eventarc.v1", manifest={ - 'GetTriggerRequest', - 'ListTriggersRequest', - 'ListTriggersResponse', - 'CreateTriggerRequest', - 'UpdateTriggerRequest', - 'DeleteTriggerRequest', - 'GetChannelRequest', - 'ListChannelsRequest', - 'ListChannelsResponse', - 'CreateChannelRequest', - 'UpdateChannelRequest', - 'DeleteChannelRequest', - 'GetProviderRequest', - 'ListProvidersRequest', - 'ListProvidersResponse', - 'GetChannelConnectionRequest', - 'ListChannelConnectionsRequest', - 'ListChannelConnectionsResponse', - 'CreateChannelConnectionRequest', - 'DeleteChannelConnectionRequest', - 'UpdateGoogleChannelConfigRequest', - 'GetGoogleChannelConfigRequest', - 'OperationMetadata', + "GetTriggerRequest", + "ListTriggersRequest", + "ListTriggersResponse", + "CreateTriggerRequest", + "UpdateTriggerRequest", + "DeleteTriggerRequest", + "GetChannelRequest", + "ListChannelsRequest", + "ListChannelsResponse", + "CreateChannelRequest", + "UpdateChannelRequest", + "DeleteChannelRequest", + "GetProviderRequest", + "ListProvidersRequest", + "ListProvidersResponse", + "GetChannelConnectionRequest", + "ListChannelConnectionsRequest", + 
"ListChannelConnectionsResponse", + "CreateChannelConnectionRequest", + "DeleteChannelConnectionRequest", + "UpdateGoogleChannelConfigRequest", + "GetGoogleChannelConfigRequest", + "OperationMetadata", }, ) @@ -625,10 +625,12 @@ class ListChannelConnectionsResponse(proto.Message): def raw_page(self): return self - channel_connections: MutableSequence[gce_channel_connection.ChannelConnection] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gce_channel_connection.ChannelConnection, + channel_connections: MutableSequence[gce_channel_connection.ChannelConnection] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gce_channel_connection.ChannelConnection, + ) ) next_page_token: str = proto.Field( proto.STRING, diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_channel_config.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_channel_config.py index 3e741c436df3..72a19492be36 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_channel_config.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_channel_config.py @@ -17,15 +17,13 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore - +import proto # type: ignore __protobuf__ = proto.module( - package='google.cloud.eventarc.v1', + package="google.cloud.eventarc.v1", manifest={ - 'GoogleChannelConfig', + "GoogleChannelConfig", }, ) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py index 2e142e47732b..36acf6276da9 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py @@ -17,23 +17,21 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore import google.rpc.code_pb2 as code_pb2 # type: ignore - +import proto # type: ignore __protobuf__ = proto.module( - package='google.cloud.eventarc.v1', + package="google.cloud.eventarc.v1", manifest={ - 'Trigger', - 'EventFilter', - 'StateCondition', - 'Destination', - 'Transport', - 'CloudRun', - 'GKE', - 'Pubsub', + "Trigger", + "EventFilter", + "StateCondition", + "Destination", + "Transport", + "CloudRun", + "GKE", + "Pubsub", }, ) @@ -126,24 +124,24 @@ class Trigger(proto.Message): number=6, message=timestamp_pb2.Timestamp, ) - event_filters: MutableSequence['EventFilter'] = proto.RepeatedField( + event_filters: MutableSequence["EventFilter"] = proto.RepeatedField( proto.MESSAGE, number=8, - message='EventFilter', + message="EventFilter", ) service_account: str = proto.Field( proto.STRING, number=9, ) - destination: 'Destination' = proto.Field( + destination: "Destination" = proto.Field( proto.MESSAGE, number=10, - message='Destination', + message="Destination", ) - transport: 'Transport' = proto.Field( + transport: "Transport" = proto.Field( proto.MESSAGE, number=11, - message='Transport', + message="Transport", ) labels: MutableMapping[str, str] = proto.MapField( proto.STRING, @@ -154,11 +152,11 @@ class Trigger(proto.Message): proto.STRING, number=13, ) - conditions: MutableMapping[str, 'StateCondition'] = proto.MapField( + conditions: MutableMapping[str, "StateCondition"] = proto.MapField( proto.STRING, proto.MESSAGE, number=15, - message='StateCondition', + message="StateCondition", ) etag: str = proto.Field( proto.STRING, @@ -260,27 +258,27 @@ class Destination(proto.Message): 
This field is a member of `oneof`_ ``descriptor``. """ - cloud_run: 'CloudRun' = proto.Field( + cloud_run: "CloudRun" = proto.Field( proto.MESSAGE, number=1, - oneof='descriptor', - message='CloudRun', + oneof="descriptor", + message="CloudRun", ) cloud_function: str = proto.Field( proto.STRING, number=2, - oneof='descriptor', + oneof="descriptor", ) - gke: 'GKE' = proto.Field( + gke: "GKE" = proto.Field( proto.MESSAGE, number=3, - oneof='descriptor', - message='GKE', + oneof="descriptor", + message="GKE", ) workflow: str = proto.Field( proto.STRING, number=4, - oneof='descriptor', + oneof="descriptor", ) @@ -299,11 +297,11 @@ class Transport(proto.Message): This field is a member of `oneof`_ ``intermediary``. """ - pubsub: 'Pubsub' = proto.Field( + pubsub: "Pubsub" = proto.Field( proto.MESSAGE, number=1, - oneof='intermediary', - message='Pubsub', + oneof="intermediary", + message="Pubsub", ) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py index 584bb9d01c7e..a0e493b72236 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py @@ -17,9 +17,8 @@ import pathlib import re import shutil - -from typing import Dict, List import warnings +from typing import Dict, List import nox @@ -154,7 +153,8 @@ def lint(session): # 2. Check formatting session.run( - "ruff", "format", + "ruff", + "format", "--check", f"--target-version=py{ALL_PYTHON[0].replace('.', '')}", "--line-length=88", @@ -167,12 +167,15 @@ def lint(session): @nox.session(python=DEFAULT_PYTHON_VERSION) def blacken(session): """(Deprecated) Legacy session. Please use 'nox -s format'.""" - session.log("WARNING: The 'blacken' session is deprecated and will be removed in a future release. 
Please use 'nox -s format' in the future.") + session.log( + "WARNING: The 'blacken' session is deprecated and will be removed in a future release. Please use 'nox -s format' in the future." + ) # Just run the ruff formatter (keeping legacy behavior of only formatting, not sorting imports) session.install(RUFF_VERSION) session.run( - "ruff", "format", + "ruff", + "format", f"--target-version=py{ALL_PYTHON[0].replace('.', '')}", "--line-length=88", *LINT_PATHS, @@ -191,8 +194,10 @@ def format(session): # check --select I: Enables strict import sorting # --fix: Applies the changes automatically session.run( - "ruff", "check", - "--select", "I", + "ruff", + "check", + "--select", + "I", "--fix", f"--target-version=py{ALL_PYTHON[0].replace('.', '')}", "--line-length=88", # Standard Black line length @@ -201,7 +206,8 @@ def format(session): # 3. Run Ruff to format code session.run( - "ruff", "format", + "ruff", + "format", f"--target-version=py{ALL_PYTHON[0].replace('.', '')}", "--line-length=88", # Standard Black line length *LINT_PATHS, @@ -386,8 +392,10 @@ def docs(session): "sphinx-build", "-T", # show full traceback on exception "-N", # no colors - "-b", "html", # builder - "-d", os.path.join("docs", "_build", "doctrees", ""), # cache directory + "-b", + "html", # builder + "-d", + os.path.join("docs", "_build", "doctrees", ""), # cache directory # paths to build: os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_async.py index bf84faf9b543..4c360baf5f68 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_async.py +++ 
b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_async.py @@ -60,4 +60,5 @@ async def sample_create_channel(): # Handle the response print(response) + # [END eventarc_v1_generated_Eventarc_CreateChannel_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_async.py index f9545399917d..174dff29005e 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_async.py @@ -59,4 +59,5 @@ async def sample_create_channel_connection(): # Handle the response print(response) + # [END eventarc_v1_generated_Eventarc_CreateChannelConnection_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_sync.py index 2ec59ef191cd..e1e41c7ea99c 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_sync.py @@ -59,4 +59,5 @@ def sample_create_channel_connection(): # Handle the response print(response) + # [END eventarc_v1_generated_Eventarc_CreateChannelConnection_sync] diff --git 
a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_sync.py index 15c6ce7f870e..6b27839da32b 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_sync.py @@ -60,4 +60,5 @@ def sample_create_channel(): # Handle the response print(response) + # [END eventarc_v1_generated_Eventarc_CreateChannel_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py index c8fc4605e1a1..cefe577a3c9b 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py @@ -63,4 +63,5 @@ async def sample_create_trigger(): # Handle the response print(response) + # [END eventarc_v1_generated_Eventarc_CreateTrigger_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_sync.py index 0df4f6f6d168..f4f0d9596a77 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_sync.py @@ -63,4 +63,5 @@ def sample_create_trigger(): # Handle the response print(response) + # [END eventarc_v1_generated_Eventarc_CreateTrigger_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_async.py index df1f80810c35..d71a183e7b63 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_async.py @@ -54,4 +54,5 @@ async def sample_delete_channel(): # Handle the response print(response) + # [END eventarc_v1_generated_Eventarc_DeleteChannel_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_async.py index 5343ff4d0fe1..923da39b3eb5 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_async.py @@ -53,4 +53,5 @@ async def sample_delete_channel_connection(): # Handle the response print(response) + # 
[END eventarc_v1_generated_Eventarc_DeleteChannelConnection_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_sync.py index bb5c5a46a4a9..68a212a6263e 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_sync.py @@ -53,4 +53,5 @@ def sample_delete_channel_connection(): # Handle the response print(response) + # [END eventarc_v1_generated_Eventarc_DeleteChannelConnection_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_sync.py index dc9b80adbcd5..beb3ef0f08f0 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_sync.py @@ -54,4 +54,5 @@ def sample_delete_channel(): # Handle the response print(response) + # [END eventarc_v1_generated_Eventarc_DeleteChannel_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py index 
365a0a068f9f..bbc735c128f4 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py @@ -54,4 +54,5 @@ async def sample_delete_trigger(): # Handle the response print(response) + # [END eventarc_v1_generated_Eventarc_DeleteTrigger_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_sync.py index ced373f70528..4ec1bf6c55bd 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_sync.py @@ -54,4 +54,5 @@ def sample_delete_trigger(): # Handle the response print(response) + # [END eventarc_v1_generated_Eventarc_DeleteTrigger_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_async.py index c642f6e60bbe..99076a456295 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_async.py @@ -49,4 +49,5 @@ async def sample_get_channel(): # Handle the response print(response) + # [END 
eventarc_v1_generated_Eventarc_GetChannel_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_async.py index 3ef14c182dae..e4ab98b5cfb4 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_async.py @@ -49,4 +49,5 @@ async def sample_get_channel_connection(): # Handle the response print(response) + # [END eventarc_v1_generated_Eventarc_GetChannelConnection_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_sync.py index 3e32910246f4..8b99dfbbe21c 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_sync.py @@ -49,4 +49,5 @@ def sample_get_channel_connection(): # Handle the response print(response) + # [END eventarc_v1_generated_Eventarc_GetChannelConnection_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_sync.py index 
b5a95d779890..990dfa8339a7 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_sync.py @@ -49,4 +49,5 @@ def sample_get_channel(): # Handle the response print(response) + # [END eventarc_v1_generated_Eventarc_GetChannel_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_async.py index 08fac9576f1d..b95f03f0973a 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_async.py @@ -49,4 +49,5 @@ async def sample_get_google_channel_config(): # Handle the response print(response) + # [END eventarc_v1_generated_Eventarc_GetGoogleChannelConfig_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_sync.py index b7a368375fb6..89b58688712e 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_sync.py @@ -49,4 +49,5 @@ def 
sample_get_google_channel_config(): # Handle the response print(response) + # [END eventarc_v1_generated_Eventarc_GetGoogleChannelConfig_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_async.py index ca30f56b8538..cf1e1b4f8f25 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_async.py @@ -49,4 +49,5 @@ async def sample_get_provider(): # Handle the response print(response) + # [END eventarc_v1_generated_Eventarc_GetProvider_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_sync.py index 182e4f03ba5c..512d1610a834 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_sync.py @@ -49,4 +49,5 @@ def sample_get_provider(): # Handle the response print(response) + # [END eventarc_v1_generated_Eventarc_GetProvider_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_async.py index 3e7f4f00cf98..8f04dabec052 
100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_async.py @@ -49,4 +49,5 @@ async def sample_get_trigger(): # Handle the response print(response) + # [END eventarc_v1_generated_Eventarc_GetTrigger_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_sync.py index 22097c52adf1..52426315212a 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_sync.py @@ -49,4 +49,5 @@ def sample_get_trigger(): # Handle the response print(response) + # [END eventarc_v1_generated_Eventarc_GetTrigger_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_async.py index 0f4c7160a0f8..e3f14a9a59a8 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_async.py @@ -50,4 +50,5 @@ async def sample_list_channel_connections(): async for response in page_result: print(response) + # [END 
eventarc_v1_generated_Eventarc_ListChannelConnections_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_sync.py index 4b24c83ca86e..6a07bbddcec0 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_sync.py @@ -50,4 +50,5 @@ def sample_list_channel_connections(): for response in page_result: print(response) + # [END eventarc_v1_generated_Eventarc_ListChannelConnections_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_async.py index 1a7522bc78f1..7db37d471afd 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_async.py @@ -50,4 +50,5 @@ async def sample_list_channels(): async for response in page_result: print(response) + # [END eventarc_v1_generated_Eventarc_ListChannels_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_sync.py index 
cbb9fcdb5658..f342b56f1681 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_sync.py @@ -50,4 +50,5 @@ def sample_list_channels(): for response in page_result: print(response) + # [END eventarc_v1_generated_Eventarc_ListChannels_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_async.py index af58a0d5d41e..6f498761f115 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_async.py @@ -50,4 +50,5 @@ async def sample_list_providers(): async for response in page_result: print(response) + # [END eventarc_v1_generated_Eventarc_ListProviders_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_sync.py index fbe34f043b8e..f8958e8dcee2 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_sync.py @@ -50,4 +50,5 @@ def sample_list_providers(): for response in page_result: print(response) + # [END 
eventarc_v1_generated_Eventarc_ListProviders_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_async.py index 1f125cfc7a71..b1dd51af5ca3 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_async.py @@ -50,4 +50,5 @@ async def sample_list_triggers(): async for response in page_result: print(response) + # [END eventarc_v1_generated_Eventarc_ListTriggers_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_sync.py index fa5d923b931e..ed60520849a8 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_sync.py @@ -50,4 +50,5 @@ def sample_list_triggers(): for response in page_result: print(response) + # [END eventarc_v1_generated_Eventarc_ListTriggers_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_async.py index dd61b4d710d8..609b0e3af4b8 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_async.py @@ -53,4 +53,5 @@ async def sample_update_channel(): # Handle the response print(response) + # [END eventarc_v1_generated_Eventarc_UpdateChannel_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_sync.py index aed47673859b..7bb9bf1e7b4d 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_sync.py @@ -53,4 +53,5 @@ def sample_update_channel(): # Handle the response print(response) + # [END eventarc_v1_generated_Eventarc_UpdateChannel_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_async.py index 343afc4d6c15..4248c1e9d51b 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_async.py @@ -52,4 +52,5 @@ async def sample_update_google_channel_config(): # Handle the response 
print(response) + # [END eventarc_v1_generated_Eventarc_UpdateGoogleChannelConfig_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_sync.py index 816b92da0099..af69521d1f5f 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_sync.py @@ -52,4 +52,5 @@ def sample_update_google_channel_config(): # Handle the response print(response) + # [END eventarc_v1_generated_Eventarc_UpdateGoogleChannelConfig_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py index 8782f34f532b..0946f089e3ae 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py @@ -53,4 +53,5 @@ async def sample_update_trigger(): # Handle the response print(response) + # [END eventarc_v1_generated_Eventarc_UpdateTrigger_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_sync.py 
b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_sync.py index 4954ec9087c5..80626fbd37e6 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_sync.py @@ -53,4 +53,5 @@ def sample_update_trigger(): # Handle the response print(response) + # [END eventarc_v1_generated_Eventarc_UpdateTrigger_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index 200443c6be46..4068fb085edd 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -17,20 +17,22 @@ import os import re -import setuptools # type: ignore +import setuptools # type: ignore package_root = os.path.abspath(os.path.dirname(__file__)) -name = 'google-cloud-eventarc' +name = "google-cloud-eventarc" description = "Google Cloud Eventarc API client library" version = None -with open(os.path.join(package_root, 'google/cloud/eventarc_v1/gapic_version.py')) as fp: +with open( + os.path.join(package_root, "google/cloud/eventarc_v1/gapic_version.py") +) as fp: version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) + assert len(version_candidates) == 1 version = version_candidates[0] if version[0] == "0": @@ -50,8 +52,7 @@ "protobuf >= 4.25.8, < 8.0.0", "grpc-google-iam-v1 >= 0.14.0, <1.0.0", ] -extras = { -} +extras = {} url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc" package_root = os.path.abspath(os.path.dirname(__file__)) diff --git 
a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/__init__.py index 191773d5572d..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/__init__.py index 191773d5572d..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/__init__.py index 191773d5572d..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/__init__.py index 191773d5572d..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git 
a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index eff2b1349f42..3f7c0f6f4abb 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -14,6 +14,7 @@ # limitations under the License. # import os + # try/except added for compatibility with python < 3.8 try: from unittest import mock @@ -21,67 +22,74 @@ except ImportError: # pragma: NO COVER import mock -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format import json import math +from collections.abc import AsyncIterable, Iterable, Mapping, Sequence + +import grpc import pytest -from collections.abc import Sequence, Mapping from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule +from google.protobuf import json_format +from grpc.experimental import aio from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from requests import PreparedRequest, Request, Response from requests.sessions import Session -from google.protobuf import json_format try: from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER +except ImportError: # pragma: NO COVER HAS_GOOGLE_AUTH_AIO = False -from google.api_core import client_options +import google.api_core.operation_async as operation_async # type: ignore +import google.auth +import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore +import google.protobuf.timestamp_pb2 as timestamp_pb2 # 
type: ignore +import google.rpc.code_pb2 as code_pb2 # type: ignore +from google.api_core import ( + client_options, + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operations_v1 -from google.api_core import path_template from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.cloud.eventarc_v1.services.eventarc import EventarcAsyncClient -from google.cloud.eventarc_v1.services.eventarc import EventarcClient -from google.cloud.eventarc_v1.services.eventarc import pagers -from google.cloud.eventarc_v1.services.eventarc import transports -from google.cloud.eventarc_v1.types import channel +from google.cloud.eventarc_v1.services.eventarc import ( + EventarcAsyncClient, + EventarcClient, + pagers, + transports, +) +from google.cloud.eventarc_v1.types import ( + channel, + channel_connection, + discovery, + eventarc, + google_channel_config, + trigger, +) from google.cloud.eventarc_v1.types import channel as gce_channel -from google.cloud.eventarc_v1.types import channel_connection from google.cloud.eventarc_v1.types import channel_connection as gce_channel_connection -from google.cloud.eventarc_v1.types import discovery -from google.cloud.eventarc_v1.types import eventarc -from google.cloud.eventarc_v1.types import google_channel_config -from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config -from google.cloud.eventarc_v1.types import trigger +from google.cloud.eventarc_v1.types import ( + google_channel_config as gce_google_channel_config, +) from 
google.cloud.eventarc_v1.types import trigger as gce_trigger from google.cloud.location import locations_pb2 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore +from google.iam.v1 import ( + iam_policy_pb2, # type: ignore + options_pb2, # type: ignore + policy_pb2, # type: ignore +) +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account -import google.api_core.operation_async as operation_async # type: ignore -import google.auth -import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore -import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore -import google.rpc.code_pb2 as code_pb2 # type: ignore - - CRED_INFO_JSON = { "credential_source": "/path/to/file", @@ -96,9 +104,11 @@ async def mock_async_gen(data, chunk_size=1): chunk = data[i : i + chunk_size] yield chunk.encode("utf-8") + def client_cert_source_callback(): return b"cert bytes", b"key bytes" + # TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. # See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. def async_anonymous_credentials(): @@ -106,17 +116,27 @@ def async_anonymous_credentials(): return ga_credentials_async.AnonymousCredentials() return ga_credentials.AnonymousCredentials() + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. 
def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + # If default endpoint template is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint template so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) def test__get_default_mtls_endpoint(): @@ -129,12 +149,22 @@ def test__get_default_mtls_endpoint(): assert EventarcClient._get_default_mtls_endpoint(None) is None assert EventarcClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert EventarcClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert EventarcClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert EventarcClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert ( + EventarcClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + EventarcClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + EventarcClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) assert EventarcClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi assert EventarcClient._get_default_mtls_endpoint(custom_endpoint) == custom_endpoint + def test__read_environment_variables(): assert EventarcClient._read_environment_variables() == (False, "auto", None) @@ -156,10 +186,10 @@ def test__read_environment_variables(): ) else: 
assert EventarcClient._read_environment_variables() == ( - False, - "auto", - None, - ) + False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): assert EventarcClient._read_environment_variables() == (False, "never", None) @@ -173,10 +203,17 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: EventarcClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert EventarcClient._read_environment_variables() == (False, "auto", "foo.com") + assert EventarcClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) def test_use_client_cert_effective(): @@ -185,7 +222,9 @@ def test_use_client_cert_effective(): # the google-auth library supports automatic mTLS and determines that a # client certificate should be used. if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=True): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=True + ): assert EventarcClient._use_client_cert_effective() is True # Test case 2: Test when `should_use_client_cert` returns False. @@ -193,7 +232,9 @@ def test_use_client_cert_effective(): # the google-auth library supports automatic mTLS and determines that a # client certificate should NOT be used. 
if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=False): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=False + ): assert EventarcClient._use_client_cert_effective() is False # Test case 3: Test when `should_use_client_cert` is unavailable and the @@ -205,7 +246,9 @@ def test_use_client_cert_effective(): # Test case 4: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"} + ): assert EventarcClient._use_client_cert_effective() is False # Test case 5: Test when `should_use_client_cert` is unavailable and the @@ -217,7 +260,9 @@ def test_use_client_cert_effective(): # Test case 6: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"} + ): assert EventarcClient._use_client_cert_effective() is False # Test case 7: Test when `should_use_client_cert` is unavailable and the @@ -229,7 +274,9 @@ def test_use_client_cert_effective(): # Test case 8: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". 
if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"} + ): assert EventarcClient._use_client_cert_effective() is False # Test case 9: Test when `should_use_client_cert` is unavailable and the @@ -244,83 +291,164 @@ def test_use_client_cert_effective(): # The method should raise a ValueError as the environment variable must be either # "true" or "false". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): with pytest.raises(ValueError): EventarcClient._use_client_cert_effective() # Test case 11: Test when `should_use_client_cert` is available and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. # The method should return False as the environment variable is set to an invalid value. - if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): assert EventarcClient._use_client_cert_effective() is False # Test case 12: Test when `should_use_client_cert` is available and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also, # the GOOGLE_API_CONFIG environment variable is unset. 
- if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): assert EventarcClient._use_client_cert_effective() is False + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() assert EventarcClient._get_client_cert_source(None, False) is None - assert EventarcClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert EventarcClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + assert ( + EventarcClient._get_client_cert_source(mock_provided_cert_source, False) is None + ) + assert ( + EventarcClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + EventarcClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + EventarcClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert EventarcClient._get_client_cert_source(None, True) is mock_default_cert_source - assert EventarcClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source -@mock.patch.object(EventarcClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(EventarcClient)) -@mock.patch.object(EventarcAsyncClient, 
"_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(EventarcAsyncClient)) +@mock.patch.object( + EventarcClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EventarcClient), +) +@mock.patch.object( + EventarcAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EventarcAsyncClient), +) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() default_universe = EventarcClient._DEFAULT_UNIVERSE - default_endpoint = EventarcClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = EventarcClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = EventarcClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = EventarcClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) - assert EventarcClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert EventarcClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == EventarcClient.DEFAULT_MTLS_ENDPOINT - assert EventarcClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert EventarcClient._get_api_endpoint(None, None, default_universe, "always") == EventarcClient.DEFAULT_MTLS_ENDPOINT - assert EventarcClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == EventarcClient.DEFAULT_MTLS_ENDPOINT - assert EventarcClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert EventarcClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + assert ( + EventarcClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + EventarcClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, 
"auto" + ) + == EventarcClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + EventarcClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + EventarcClient._get_api_endpoint(None, None, default_universe, "always") + == EventarcClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + EventarcClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == EventarcClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + EventarcClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + EventarcClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) with pytest.raises(MutualTLSChannelError) as excinfo: - EventarcClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + EventarcClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) def test__get_universe_domain(): client_universe_domain = "foo.com" universe_domain_env = "bar.com" - assert EventarcClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert EventarcClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert EventarcClient._get_universe_domain(None, None) == EventarcClient._DEFAULT_UNIVERSE + assert ( + EventarcClient._get_universe_domain(client_universe_domain, universe_domain_env) + == client_universe_domain + ) + assert ( + EventarcClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + EventarcClient._get_universe_domain(None, None) + == EventarcClient._DEFAULT_UNIVERSE + ) with pytest.raises(ValueError) as excinfo: EventarcClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): cred = mock.Mock(["get_cred_info"]) cred.get_cred_info = mock.Mock(return_value=cred_info_json) @@ -336,7 +464,8 @@ def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_in else: assert error.details == ["foo"] -@pytest.mark.parametrize("error_code", [401,403,404,500]) + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) def 
test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): cred = mock.Mock([]) assert not hasattr(cred, "get_cred_info") @@ -349,14 +478,20 @@ def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): client._add_cred_info_for_auth_errors(error) assert error.details == [] -@pytest.mark.parametrize("client_class,transport_name", [ - (EventarcClient, "grpc"), - (EventarcAsyncClient, "grpc_asyncio"), - (EventarcClient, "rest"), -]) + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (EventarcClient, "grpc"), + (EventarcAsyncClient, "grpc_asyncio"), + (EventarcClient, "rest"), + ], +) def test_eventarc_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: factory.return_value = creds info = {"valid": True} client = client_class.from_service_account_info(info, transport=transport_name) @@ -364,52 +499,68 @@ def test_eventarc_client_from_service_account_info(client_class, transport_name) assert isinstance(client, client_class) assert client.transport._host == ( - 'eventarc.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://eventarc.googleapis.com' + "eventarc.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://eventarc.googleapis.com" ) -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.EventarcGrpcTransport, "grpc"), - (transports.EventarcGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.EventarcRestTransport, "rest"), -]) -def test_eventarc_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + 
(transports.EventarcGrpcTransport, "grpc"), + (transports.EventarcGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.EventarcRestTransport, "rest"), + ], +) +def test_eventarc_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=False) use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class,transport_name", [ - (EventarcClient, "grpc"), - (EventarcAsyncClient, "grpc_asyncio"), - (EventarcClient, "rest"), -]) +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (EventarcClient, "grpc"), + (EventarcAsyncClient, "grpc_asyncio"), + (EventarcClient, "rest"), + ], +) def test_eventarc_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + 
client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) assert client.transport._host == ( - 'eventarc.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://eventarc.googleapis.com' + "eventarc.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://eventarc.googleapis.com" ) @@ -425,30 +576,39 @@ def test_eventarc_client_get_transport_class(): assert transport == transports.EventarcGrpcTransport -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (EventarcClient, transports.EventarcGrpcTransport, "grpc"), - (EventarcAsyncClient, transports.EventarcGrpcAsyncIOTransport, "grpc_asyncio"), - (EventarcClient, transports.EventarcRestTransport, "rest"), -]) -@mock.patch.object(EventarcClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(EventarcClient)) -@mock.patch.object(EventarcAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(EventarcAsyncClient)) +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (EventarcClient, transports.EventarcGrpcTransport, "grpc"), + (EventarcAsyncClient, transports.EventarcGrpcAsyncIOTransport, "grpc_asyncio"), + (EventarcClient, transports.EventarcRestTransport, "rest"), + ], +) +@mock.patch.object( + EventarcClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EventarcClient), +) +@mock.patch.object( + EventarcAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EventarcAsyncClient), +) def test_eventarc_client_client_options(client_class, transport_class, transport_name): # Check that if channel is provided we won't create a new one. 
- with mock.patch.object(EventarcClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) + with mock.patch.object(EventarcClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. - with mock.patch.object(EventarcClient, 'get_transport_class') as gtc: + with mock.patch.object(EventarcClient, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() # Check the case api_endpoint is provided. options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( @@ -466,13 +626,15 @@ def test_eventarc_client_client_options(client_class, transport_class, transport # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -484,7 +646,7 @@ def test_eventarc_client_client_options(client_class, transport_class, transport # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( @@ -504,17 +666,22 @@ def test_eventarc_client_client_options(client_class, transport_class, transport with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, 
transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -523,48 +690,82 @@ def test_eventarc_client_client_options(client_class, transport_class, transport api_audience=None, ) # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (EventarcClient, transports.EventarcGrpcTransport, "grpc", "true"), - (EventarcAsyncClient, transports.EventarcGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (EventarcClient, transports.EventarcGrpcTransport, "grpc", "false"), - (EventarcAsyncClient, transports.EventarcGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (EventarcClient, transports.EventarcRestTransport, "rest", "true"), - (EventarcClient, transports.EventarcRestTransport, "rest", "false"), -]) 
-@mock.patch.object(EventarcClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(EventarcClient)) -@mock.patch.object(EventarcAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(EventarcAsyncClient)) + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (EventarcClient, transports.EventarcGrpcTransport, "grpc", "true"), + ( + EventarcAsyncClient, + transports.EventarcGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (EventarcClient, transports.EventarcGrpcTransport, "grpc", "false"), + ( + EventarcAsyncClient, + transports.EventarcGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (EventarcClient, transports.EventarcRestTransport, "rest", "true"), + (EventarcClient, transports.EventarcRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + EventarcClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EventarcClient), +) +@mock.patch.object( + EventarcAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EventarcAsyncClient), +) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_eventarc_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): +def test_eventarc_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. # Check the case client_cert_source is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -583,12 +784,22 @@ def test_eventarc_client_mtls_env_auto(client_class, transport_class, transport_ # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -609,15 +820,22 @@ def test_eventarc_client_mtls_env_auto(client_class, transport_class, transport_ ) # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -627,19 +845,27 @@ def test_eventarc_client_mtls_env_auto(client_class, transport_class, transport_ ) -@pytest.mark.parametrize("client_class", [ - EventarcClient, EventarcAsyncClient -]) -@mock.patch.object(EventarcClient, "DEFAULT_ENDPOINT", modify_default_endpoint(EventarcClient)) -@mock.patch.object(EventarcAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(EventarcAsyncClient)) +@pytest.mark.parametrize("client_class", [EventarcClient, EventarcAsyncClient]) +@mock.patch.object( + EventarcClient, "DEFAULT_ENDPOINT", modify_default_endpoint(EventarcClient) +) +@mock.patch.object( + EventarcAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(EventarcAsyncClient), +) def test_eventarc_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source == mock_client_cert_source @@ -647,18 +873,25 @@ def test_eventarc_client_get_mtls_endpoint_and_cert_source(client_class): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source is None # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): if hasattr(google.auth.transport.mtls, "should_use_client_cert"): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, ) api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( options @@ -695,23 +928,23 @@ def test_eventarc_client_get_mtls_endpoint_and_cert_source(client_class): env = os.environ.copy() env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) with mock.patch.dict(os.environ, env, clear=True): - config_filename = "mock_certificate_config.json" - config_file_content = json.dumps(config_data) - m = mock.mock_open(read_data=config_file_content) - with mock.patch("builtins.open", m): - with mock.patch.dict( - os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} - ): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, - api_endpoint=mock_api_endpoint, - ) - api_endpoint, cert_source = ( - client_class.get_mtls_endpoint_and_cert_source(options) - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is expected_cert_source + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == 
mock_api_endpoint + assert cert_source is expected_cert_source # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). test_cases = [ @@ -742,23 +975,23 @@ def test_eventarc_client_get_mtls_endpoint_and_cert_source(client_class): env = os.environ.copy() env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") with mock.patch.dict(os.environ, env, clear=True): - config_filename = "mock_certificate_config.json" - config_file_content = json.dumps(config_data) - m = mock.mock_open(read_data=config_file_content) - with mock.patch("builtins.open", m): - with mock.patch.dict( - os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} - ): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, - api_endpoint=mock_api_endpoint, - ) - api_endpoint, cert_source = ( - client_class.get_mtls_endpoint_and_cert_source(options) - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is expected_cert_source + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): @@ -774,16 +1007,27 @@ def test_eventarc_client_get_mtls_endpoint_and_cert_source(client_class): # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_ENDPOINT assert cert_source is None # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source() + ) assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source @@ -793,27 +1037,48 @@ def test_eventarc_client_get_mtls_endpoint_and_cert_source(client_class): with pytest.raises(MutualTLSChannelError) as excinfo: client_class.get_mtls_endpoint_and_cert_source() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + -@pytest.mark.parametrize("client_class", [ - EventarcClient, EventarcAsyncClient -]) -@mock.patch.object(EventarcClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(EventarcClient)) 
-@mock.patch.object(EventarcAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(EventarcAsyncClient)) +@pytest.mark.parametrize("client_class", [EventarcClient, EventarcAsyncClient]) +@mock.patch.object( + EventarcClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EventarcClient), +) +@mock.patch.object( + EventarcAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(EventarcAsyncClient), +) def test_eventarc_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" default_universe = EventarcClient._DEFAULT_UNIVERSE - default_endpoint = EventarcClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = EventarcClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = EventarcClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = EventarcClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", @@ -836,11 +1101,19 @@ def test_eventarc_client_client_api_endpoint(client_class): universe_exists = hasattr(options, "universe_domain") if universe_exists: options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the _DEFAULT_ENDPOINT_TEMPLATE 
populated with GDU as the api endpoint. @@ -848,27 +1121,36 @@ def test_eventarc_client_client_api_endpoint(client_class): if hasattr(options, "universe_domain"): delattr(options, "universe_domain") with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) assert client.api_endpoint == default_endpoint -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (EventarcClient, transports.EventarcGrpcTransport, "grpc"), - (EventarcAsyncClient, transports.EventarcGrpcAsyncIOTransport, "grpc_asyncio"), - (EventarcClient, transports.EventarcRestTransport, "rest"), -]) -def test_eventarc_client_client_options_scopes(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (EventarcClient, transports.EventarcGrpcTransport, "grpc"), + (EventarcAsyncClient, transports.EventarcGrpcAsyncIOTransport, "grpc_asyncio"), + (EventarcClient, transports.EventarcRestTransport, "rest"), + ], +) +def test_eventarc_client_client_options_scopes( + client_class, transport_class, transport_name +): # Check the case scopes are provided. 
options = client_options.ClientOptions( scopes=["1", "2"], ) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -877,24 +1159,35 @@ def test_eventarc_client_client_options_scopes(client_class, transport_class, tr api_audience=None, ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (EventarcClient, transports.EventarcGrpcTransport, "grpc", grpc_helpers), - (EventarcAsyncClient, transports.EventarcGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (EventarcClient, transports.EventarcRestTransport, "rest", None), -]) -def test_eventarc_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (EventarcClient, transports.EventarcGrpcTransport, "grpc", grpc_helpers), + ( + EventarcAsyncClient, + transports.EventarcGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (EventarcClient, transports.EventarcRestTransport, "rest", None), + ], +) +def test_eventarc_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -903,12 +1196,13 @@ def test_eventarc_client_client_options_credentials_file(client_class, transport api_audience=None, ) + def test_eventarc_client_client_options_from_dict(): - with mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcGrpcTransport.__init__') as grpc_transport: + with mock.patch( + "google.cloud.eventarc_v1.services.eventarc.transports.EventarcGrpcTransport.__init__" + ) as grpc_transport: grpc_transport.return_value = None - client = EventarcClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) + client = EventarcClient(client_options={"api_endpoint": "squid.clam.whelk"}) grpc_transport.assert_called_once_with( credentials=None, credentials_file=None, @@ -922,23 +1216,33 @@ def test_eventarc_client_client_options_from_dict(): ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (EventarcClient, transports.EventarcGrpcTransport, "grpc", grpc_helpers), - (EventarcAsyncClient, transports.EventarcGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_eventarc_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): +@pytest.mark.parametrize( + 
"client_class,transport_class,transport_name,grpc_helpers", + [ + (EventarcClient, transports.EventarcGrpcTransport, "grpc", grpc_helpers), + ( + EventarcAsyncClient, + transports.EventarcGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_eventarc_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -948,13 +1252,13 @@ def test_eventarc_client_create_channel_credentials_file(client_class, transport ) # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch.object(grpc_helpers, "create_channel") as create_channel, + ): creds = ga_credentials.AnonymousCredentials() file_creds = ga_credentials.AnonymousCredentials() load_creds.return_value = (file_creds, None) @@ -965,9 +1269,7 @@ def test_eventarc_client_create_channel_credentials_file(client_class, transport credentials=file_creds, credentials_file=None, quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), scopes=None, default_host="eventarc.googleapis.com", ssl_credentials=None, @@ -978,11 +1280,14 @@ def test_eventarc_client_create_channel_credentials_file(client_class, transport ) -@pytest.mark.parametrize("request_type", [ - eventarc.GetTriggerRequest, - dict, -]) -def test_get_trigger(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + eventarc.GetTriggerRequest, + dict, + ], +) +def test_get_trigger(request_type, transport: str = "grpc"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -993,16 +1298,14 @@ def test_get_trigger(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_trigger), - '__call__') as call: + with mock.patch.object(type(client.transport.get_trigger), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = trigger.Trigger( - name='name_value', - uid='uid_value', - service_account='service_account_value', - channel='channel_value', - etag='etag_value', + name="name_value", + uid="uid_value", + service_account="service_account_value", + channel="channel_value", + etag="etag_value", ) response = client.get_trigger(request) @@ -1014,11 +1317,11 @@ def test_get_trigger(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, trigger.Trigger) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.service_account == 'service_account_value' - assert response.channel == 'channel_value' - assert response.etag == 'etag_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.service_account == "service_account_value" + assert response.channel == "channel_value" + assert response.etag == "etag_value" def test_get_trigger_non_empty_request_with_auto_populated_field(): @@ -1026,28 +1329,29 @@ def test_get_trigger_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = eventarc.GetTriggerRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_trigger), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ with mock.patch.object(type(client.transport.get_trigger), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_trigger(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.GetTriggerRequest( - name='name_value', + name="name_value", ) + def test_get_trigger_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1066,7 +1370,9 @@ def test_get_trigger_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.get_trigger] = mock_rpc request = {} client.get_trigger(request) @@ -1080,8 +1386,11 @@ def test_get_trigger_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_get_trigger_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1095,12 +1404,17 @@ async def test_get_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_a wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_trigger in client._client._transport._wrapped_methods + assert ( + client._client._transport.get_trigger + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() 
mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_trigger] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.get_trigger + ] = mock_rpc request = {} await client.get_trigger(request) @@ -1114,8 +1428,11 @@ async def test_get_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetTriggerRequest): +async def test_get_trigger_async( + transport: str = "grpc_asyncio", request_type=eventarc.GetTriggerRequest +): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -1126,17 +1443,17 @@ async def test_get_trigger_async(transport: str = 'grpc_asyncio', request_type=e request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_trigger), - '__call__') as call: + with mock.patch.object(type(client.transport.get_trigger), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(trigger.Trigger( - name='name_value', - uid='uid_value', - service_account='service_account_value', - channel='channel_value', - etag='etag_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + trigger.Trigger( + name="name_value", + uid="uid_value", + service_account="service_account_value", + channel="channel_value", + etag="etag_value", + ) + ) response = await client.get_trigger(request) # Establish that the underlying gRPC stub method was called. @@ -1147,17 +1464,18 @@ async def test_get_trigger_async(transport: str = 'grpc_asyncio', request_type=e # Establish that the response is the type that we expect. 
assert isinstance(response, trigger.Trigger) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.service_account == 'service_account_value' - assert response.channel == 'channel_value' - assert response.etag == 'etag_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.service_account == "service_account_value" + assert response.channel == "channel_value" + assert response.etag == "etag_value" @pytest.mark.asyncio async def test_get_trigger_async_from_dict(): await test_get_trigger_async(request_type=dict) + def test_get_trigger_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1167,12 +1485,10 @@ def test_get_trigger_field_headers(): # a field header. Set these to a non-empty value. request = eventarc.GetTriggerRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_trigger), - '__call__') as call: + with mock.patch.object(type(client.transport.get_trigger), "__call__") as call: call.return_value = trigger.Trigger() client.get_trigger(request) @@ -1184,9 +1500,9 @@ def test_get_trigger_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1199,12 +1515,10 @@ async def test_get_trigger_field_headers_async(): # a field header. Set these to a non-empty value. request = eventarc.GetTriggerRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_trigger), - '__call__') as call: + with mock.patch.object(type(client.transport.get_trigger), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(trigger.Trigger()) await client.get_trigger(request) @@ -1216,9 +1530,9 @@ async def test_get_trigger_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_get_trigger_flattened(): @@ -1227,15 +1541,13 @@ def test_get_trigger_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_trigger), - '__call__') as call: + with mock.patch.object(type(client.transport.get_trigger), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = trigger.Trigger() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_trigger( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -1243,7 +1555,7 @@ def test_get_trigger_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -1257,9 +1569,10 @@ def test_get_trigger_flattened_error(): with pytest.raises(ValueError): client.get_trigger( eventarc.GetTriggerRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_get_trigger_flattened_async(): client = EventarcAsyncClient( @@ -1267,9 +1580,7 @@ async def test_get_trigger_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_trigger), - '__call__') as call: + with mock.patch.object(type(client.transport.get_trigger), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = trigger.Trigger() @@ -1277,7 +1588,7 @@ async def test_get_trigger_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_trigger( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -1285,9 +1596,10 @@ async def test_get_trigger_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_get_trigger_flattened_error_async(): client = EventarcAsyncClient( @@ -1299,15 +1611,18 @@ async def test_get_trigger_flattened_error_async(): with pytest.raises(ValueError): await client.get_trigger( eventarc.GetTriggerRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - eventarc.ListTriggersRequest, - dict, -]) -def test_list_triggers(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + eventarc.ListTriggersRequest, + dict, + ], +) +def test_list_triggers(request_type, transport: str = "grpc"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1318,13 +1633,11 @@ def test_list_triggers(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_triggers), - '__call__') as call: + with mock.patch.object(type(client.transport.list_triggers), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = eventarc.ListTriggersResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) response = client.list_triggers(request) @@ -1336,8 +1649,8 @@ def test_list_triggers(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListTriggersPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] def test_list_triggers_non_empty_request_with_auto_populated_field(): @@ -1345,34 +1658,35 @@ def test_list_triggers_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = eventarc.ListTriggersRequest( - parent='parent_value', - page_token='page_token_value', - order_by='order_by_value', - filter='filter_value', + parent="parent_value", + page_token="page_token_value", + order_by="order_by_value", + filter="filter_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_triggers), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.list_triggers), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_triggers(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.ListTriggersRequest( - parent='parent_value', - page_token='page_token_value', - order_by='order_by_value', - filter='filter_value', + parent="parent_value", + page_token="page_token_value", + order_by="order_by_value", + filter="filter_value", ) + def test_list_triggers_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1391,7 +1705,9 @@ def test_list_triggers_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.list_triggers] = mock_rpc request = {} client.list_triggers(request) @@ -1405,8 +1721,11 @@ def test_list_triggers_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_triggers_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_triggers_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1420,12 +1739,17 @@ async def test_list_triggers_async_use_cached_wrapped_rpc(transport: str = "grpc wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_triggers in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_triggers + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = 
mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_triggers] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_triggers + ] = mock_rpc request = {} await client.list_triggers(request) @@ -1439,8 +1763,11 @@ async def test_list_triggers_async_use_cached_wrapped_rpc(transport: str = "grpc assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_triggers_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListTriggersRequest): +async def test_list_triggers_async( + transport: str = "grpc_asyncio", request_type=eventarc.ListTriggersRequest +): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -1451,14 +1778,14 @@ async def test_list_triggers_async(transport: str = 'grpc_asyncio', request_type request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_triggers), - '__call__') as call: + with mock.patch.object(type(client.transport.list_triggers), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListTriggersResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + eventarc.ListTriggersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) response = await client.list_triggers(request) # Establish that the underlying gRPC stub method was called. @@ -1469,14 +1796,15 @@ async def test_list_triggers_async(transport: str = 'grpc_asyncio', request_type # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListTriggersAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio async def test_list_triggers_async_from_dict(): await test_list_triggers_async(request_type=dict) + def test_list_triggers_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1486,12 +1814,10 @@ def test_list_triggers_field_headers(): # a field header. Set these to a non-empty value. request = eventarc.ListTriggersRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_triggers), - '__call__') as call: + with mock.patch.object(type(client.transport.list_triggers), "__call__") as call: call.return_value = eventarc.ListTriggersResponse() client.list_triggers(request) @@ -1503,9 +1829,9 @@ def test_list_triggers_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1518,13 +1844,13 @@ async def test_list_triggers_field_headers_async(): # a field header. Set these to a non-empty value. request = eventarc.ListTriggersRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_triggers), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListTriggersResponse()) + with mock.patch.object(type(client.transport.list_triggers), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + eventarc.ListTriggersResponse() + ) await client.list_triggers(request) # Establish that the underlying gRPC stub method was called. @@ -1535,9 +1861,9 @@ async def test_list_triggers_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_list_triggers_flattened(): @@ -1546,15 +1872,13 @@ def test_list_triggers_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_triggers), - '__call__') as call: + with mock.patch.object(type(client.transport.list_triggers), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = eventarc.ListTriggersResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.list_triggers( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1562,7 +1886,7 @@ def test_list_triggers_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -1576,9 +1900,10 @@ def test_list_triggers_flattened_error(): with pytest.raises(ValueError): client.list_triggers( eventarc.ListTriggersRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test_list_triggers_flattened_async(): client = EventarcAsyncClient( @@ -1586,17 +1911,17 @@ async def test_list_triggers_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_triggers), - '__call__') as call: + with mock.patch.object(type(client.transport.list_triggers), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = eventarc.ListTriggersResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListTriggersResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + eventarc.ListTriggersResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.list_triggers( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1604,9 +1929,10 @@ async def test_list_triggers_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test_list_triggers_flattened_error_async(): client = EventarcAsyncClient( @@ -1618,7 +1944,7 @@ async def test_list_triggers_flattened_error_async(): with pytest.raises(ValueError): await client.list_triggers( eventarc.ListTriggersRequest(), - parent='parent_value', + parent="parent_value", ) @@ -1629,9 +1955,7 @@ def test_list_triggers_pager(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_triggers), - '__call__') as call: + with mock.patch.object(type(client.transport.list_triggers), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( eventarc.ListTriggersResponse( @@ -1640,17 +1964,17 @@ def test_list_triggers_pager(transport_name: str = "grpc"): trigger.Trigger(), trigger.Trigger(), ], - next_page_token='abc', + next_page_token="abc", ), eventarc.ListTriggersResponse( triggers=[], - next_page_token='def', + next_page_token="def", ), eventarc.ListTriggersResponse( triggers=[ trigger.Trigger(), ], - next_page_token='ghi', + next_page_token="ghi", ), eventarc.ListTriggersResponse( triggers=[ @@ -1665,9 +1989,7 @@ def test_list_triggers_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_triggers(request={}, retry=retry, timeout=timeout) @@ -1677,8 +1999,9 @@ def test_list_triggers_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, trigger.Trigger) - for i in results) + assert all(isinstance(i, trigger.Trigger) for i in results) + + def test_list_triggers_pages(transport_name: str = "grpc"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1686,9 +2009,7 @@ def test_list_triggers_pages(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_triggers), - '__call__') as call: + with mock.patch.object(type(client.transport.list_triggers), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( eventarc.ListTriggersResponse( @@ -1697,17 +2018,17 @@ def test_list_triggers_pages(transport_name: str = "grpc"): trigger.Trigger(), trigger.Trigger(), ], - next_page_token='abc', + next_page_token="abc", ), eventarc.ListTriggersResponse( triggers=[], - next_page_token='def', + next_page_token="def", ), eventarc.ListTriggersResponse( triggers=[ trigger.Trigger(), ], - next_page_token='ghi', + next_page_token="ghi", ), eventarc.ListTriggersResponse( triggers=[ @@ -1718,9 +2039,10 @@ def test_list_triggers_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.list_triggers(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_list_triggers_async_pager(): client = EventarcAsyncClient( @@ -1729,8 +2051,8 @@ async def test_list_triggers_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_triggers), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_triggers), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( eventarc.ListTriggersResponse( @@ -1739,17 +2061,17 @@ async def test_list_triggers_async_pager(): trigger.Trigger(), trigger.Trigger(), ], - next_page_token='abc', + next_page_token="abc", ), eventarc.ListTriggersResponse( triggers=[], - next_page_token='def', + next_page_token="def", ), eventarc.ListTriggersResponse( triggers=[ trigger.Trigger(), ], - next_page_token='ghi', + next_page_token="ghi", ), eventarc.ListTriggersResponse( triggers=[ @@ -1759,15 +2081,16 @@ async def test_list_triggers_async_pager(): ), RuntimeError, ) - async_pager = await client.list_triggers(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.list_triggers( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, trigger.Trigger) - for i in responses) + assert all(isinstance(i, trigger.Trigger) for i in responses) @pytest.mark.asyncio @@ -1778,8 +2101,8 @@ async def test_list_triggers_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_triggers), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_triggers), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( eventarc.ListTriggersResponse( @@ -1788,17 +2111,17 @@ async def test_list_triggers_async_pages(): trigger.Trigger(), trigger.Trigger(), ], - next_page_token='abc', + next_page_token="abc", ), eventarc.ListTriggersResponse( triggers=[], - next_page_token='def', + next_page_token="def", ), eventarc.ListTriggersResponse( triggers=[ trigger.Trigger(), ], - next_page_token='ghi', + next_page_token="ghi", ), eventarc.ListTriggersResponse( triggers=[ @@ -1811,18 +2134,22 @@ async def test_list_triggers_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.list_triggers(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - eventarc.CreateTriggerRequest, - dict, -]) -def test_create_trigger(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + eventarc.CreateTriggerRequest, + dict, + ], +) +def test_create_trigger(request_type, transport: str = "grpc"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1833,11 +2160,9 @@ def test_create_trigger(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_trigger), - '__call__') as call: + with mock.patch.object(type(client.transport.create_trigger), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.create_trigger(request) # Establish that the underlying gRPC stub method was called. @@ -1855,30 +2180,31 @@ def test_create_trigger_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = eventarc.CreateTriggerRequest( - parent='parent_value', - trigger_id='trigger_id_value', + parent="parent_value", + trigger_id="trigger_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_trigger), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.create_trigger), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_trigger(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.CreateTriggerRequest( - parent='parent_value', - trigger_id='trigger_id_value', + parent="parent_value", + trigger_id="trigger_id_value", ) + def test_create_trigger_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1897,7 +2223,9 @@ def test_create_trigger_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.create_trigger] = mock_rpc request = {} client.create_trigger(request) @@ -1916,8 +2244,11 @@ def test_create_trigger_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_create_trigger_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1931,12 +2262,17 @@ async def test_create_trigger_async_use_cached_wrapped_rpc(transport: str = "grp wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.create_trigger in client._client._transport._wrapped_methods + assert ( + client._client._transport.create_trigger + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_trigger] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.create_trigger + ] = mock_rpc request = {} await client.create_trigger(request) @@ -1955,8 +2291,11 @@ async def test_create_trigger_async_use_cached_wrapped_rpc(transport: str = "grp assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.CreateTriggerRequest): +async def test_create_trigger_async( + transport: str = "grpc_asyncio", request_type=eventarc.CreateTriggerRequest +): client = EventarcAsyncClient( 
credentials=async_anonymous_credentials(), transport=transport, @@ -1967,12 +2306,10 @@ async def test_create_trigger_async(transport: str = 'grpc_asyncio', request_typ request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_trigger), - '__call__') as call: + with mock.patch.object(type(client.transport.create_trigger), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.create_trigger(request) @@ -1990,6 +2327,7 @@ async def test_create_trigger_async(transport: str = 'grpc_asyncio', request_typ async def test_create_trigger_async_from_dict(): await test_create_trigger_async(request_type=dict) + def test_create_trigger_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1999,13 +2337,11 @@ def test_create_trigger_field_headers(): # a field header. Set these to a non-empty value. request = eventarc.CreateTriggerRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_trigger), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.create_trigger), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.create_trigger(request) # Establish that the underlying gRPC stub method was called. @@ -2016,9 +2352,9 @@ def test_create_trigger_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2031,13 +2367,13 @@ async def test_create_trigger_field_headers_async(): # a field header. Set these to a non-empty value. request = eventarc.CreateTriggerRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + with mock.patch.object(type(client.transport.create_trigger), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.create_trigger(request) # Establish that the underlying gRPC stub method was called. @@ -2048,9 +2384,9 @@ async def test_create_trigger_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_create_trigger_flattened(): @@ -2059,17 +2395,15 @@ def test_create_trigger_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_trigger), - '__call__') as call: + with mock.patch.object(type(client.transport.create_trigger), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.create_trigger( - parent='parent_value', - trigger=gce_trigger.Trigger(name='name_value'), - trigger_id='trigger_id_value', + parent="parent_value", + trigger=gce_trigger.Trigger(name="name_value"), + trigger_id="trigger_id_value", ) # Establish that the underlying call was made with the expected @@ -2077,13 +2411,13 @@ def test_create_trigger_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].trigger - mock_val = gce_trigger.Trigger(name='name_value') + mock_val = gce_trigger.Trigger(name="name_value") assert arg == mock_val arg = args[0].trigger_id - mock_val = 'trigger_id_value' + mock_val = "trigger_id_value" assert arg == mock_val @@ -2097,11 +2431,12 @@ def test_create_trigger_flattened_error(): with pytest.raises(ValueError): client.create_trigger( eventarc.CreateTriggerRequest(), - parent='parent_value', - trigger=gce_trigger.Trigger(name='name_value'), - trigger_id='trigger_id_value', + parent="parent_value", + trigger=gce_trigger.Trigger(name="name_value"), + trigger_id="trigger_id_value", ) + @pytest.mark.asyncio async def test_create_trigger_flattened_async(): client = EventarcAsyncClient( @@ -2109,21 +2444,19 @@ async def test_create_trigger_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_trigger), - '__call__') as call: + with mock.patch.object(type(client.transport.create_trigger), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_trigger( - parent='parent_value', - trigger=gce_trigger.Trigger(name='name_value'), - trigger_id='trigger_id_value', + parent="parent_value", + trigger=gce_trigger.Trigger(name="name_value"), + trigger_id="trigger_id_value", ) # Establish that the underlying call was made with the expected @@ -2131,15 +2464,16 @@ async def test_create_trigger_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].trigger - mock_val = gce_trigger.Trigger(name='name_value') + mock_val = gce_trigger.Trigger(name="name_value") assert arg == mock_val arg = args[0].trigger_id - mock_val = 'trigger_id_value' + mock_val = "trigger_id_value" assert arg == mock_val + @pytest.mark.asyncio async def test_create_trigger_flattened_error_async(): client = EventarcAsyncClient( @@ -2151,17 +2485,20 @@ async def test_create_trigger_flattened_error_async(): with pytest.raises(ValueError): await client.create_trigger( eventarc.CreateTriggerRequest(), - parent='parent_value', - trigger=gce_trigger.Trigger(name='name_value'), - trigger_id='trigger_id_value', + parent="parent_value", + trigger=gce_trigger.Trigger(name="name_value"), + trigger_id="trigger_id_value", ) -@pytest.mark.parametrize("request_type", [ - eventarc.UpdateTriggerRequest, - dict, -]) -def test_update_trigger(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + eventarc.UpdateTriggerRequest, + dict, + ], +) +def 
test_update_trigger(request_type, transport: str = "grpc"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2172,11 +2509,9 @@ def test_update_trigger(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_trigger), - '__call__') as call: + with mock.patch.object(type(client.transport.update_trigger), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.update_trigger(request) # Establish that the underlying gRPC stub method was called. @@ -2194,25 +2529,24 @@ def test_update_trigger_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = eventarc.UpdateTriggerRequest( - ) + request = eventarc.UpdateTriggerRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_trigger), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.update_trigger), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.update_trigger(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.UpdateTriggerRequest( - ) + assert args[0] == eventarc.UpdateTriggerRequest() + def test_update_trigger_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2232,7 +2566,9 @@ def test_update_trigger_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.update_trigger] = mock_rpc request = {} client.update_trigger(request) @@ -2251,8 +2587,11 @@ def test_update_trigger_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_update_trigger_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2266,12 +2605,17 @@ async def test_update_trigger_async_use_cached_wrapped_rpc(transport: str = "grp wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.update_trigger in client._client._transport._wrapped_methods + assert ( + client._client._transport.update_trigger + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_trigger] = mock_rpc + client._client._transport._wrapped_methods[ + 
client._client._transport.update_trigger + ] = mock_rpc request = {} await client.update_trigger(request) @@ -2290,8 +2634,11 @@ async def test_update_trigger_async_use_cached_wrapped_rpc(transport: str = "grp assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.UpdateTriggerRequest): +async def test_update_trigger_async( + transport: str = "grpc_asyncio", request_type=eventarc.UpdateTriggerRequest +): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2302,12 +2649,10 @@ async def test_update_trigger_async(transport: str = 'grpc_asyncio', request_typ request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_trigger), - '__call__') as call: + with mock.patch.object(type(client.transport.update_trigger), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.update_trigger(request) @@ -2325,6 +2670,7 @@ async def test_update_trigger_async(transport: str = 'grpc_asyncio', request_typ async def test_update_trigger_async_from_dict(): await test_update_trigger_async(request_type=dict) + def test_update_trigger_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2334,13 +2680,11 @@ def test_update_trigger_field_headers(): # a field header. Set these to a non-empty value. request = eventarc.UpdateTriggerRequest() - request.trigger.name = 'name_value' + request.trigger.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_trigger), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.update_trigger), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.update_trigger(request) # Establish that the underlying gRPC stub method was called. @@ -2351,9 +2695,9 @@ def test_update_trigger_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'trigger.name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "trigger.name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2366,13 +2710,13 @@ async def test_update_trigger_field_headers_async(): # a field header. Set these to a non-empty value. request = eventarc.UpdateTriggerRequest() - request.trigger.name = 'name_value' + request.trigger.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + with mock.patch.object(type(client.transport.update_trigger), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.update_trigger(request) # Establish that the underlying gRPC stub method was called. @@ -2383,9 +2727,9 @@ async def test_update_trigger_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'trigger.name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "trigger.name=name_value", + ) in kw["metadata"] def test_update_trigger_flattened(): @@ -2394,16 +2738,14 @@ def test_update_trigger_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_trigger), - '__call__') as call: + with mock.patch.object(type(client.transport.update_trigger), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_trigger( - trigger=gce_trigger.Trigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + trigger=gce_trigger.Trigger(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), allow_missing=True, ) @@ -2412,10 +2754,10 @@ def test_update_trigger_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].trigger - mock_val = gce_trigger.Trigger(name='name_value') + mock_val = gce_trigger.Trigger(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val arg = args[0].allow_missing mock_val = True @@ -2432,11 +2774,12 @@ def test_update_trigger_flattened_error(): with pytest.raises(ValueError): client.update_trigger( eventarc.UpdateTriggerRequest(), - trigger=gce_trigger.Trigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + trigger=gce_trigger.Trigger(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), allow_missing=True, 
) + @pytest.mark.asyncio async def test_update_trigger_flattened_async(): client = EventarcAsyncClient( @@ -2444,20 +2787,18 @@ async def test_update_trigger_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_trigger), - '__call__') as call: + with mock.patch.object(type(client.transport.update_trigger), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_trigger( - trigger=gce_trigger.Trigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + trigger=gce_trigger.Trigger(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), allow_missing=True, ) @@ -2466,15 +2807,16 @@ async def test_update_trigger_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].trigger - mock_val = gce_trigger.Trigger(name='name_value') + mock_val = gce_trigger.Trigger(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val arg = args[0].allow_missing mock_val = True assert arg == mock_val + @pytest.mark.asyncio async def test_update_trigger_flattened_error_async(): client = EventarcAsyncClient( @@ -2486,17 +2828,20 @@ async def test_update_trigger_flattened_error_async(): with pytest.raises(ValueError): await client.update_trigger( eventarc.UpdateTriggerRequest(), - 
trigger=gce_trigger.Trigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + trigger=gce_trigger.Trigger(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), allow_missing=True, ) -@pytest.mark.parametrize("request_type", [ - eventarc.DeleteTriggerRequest, - dict, -]) -def test_delete_trigger(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + eventarc.DeleteTriggerRequest, + dict, + ], +) +def test_delete_trigger(request_type, transport: str = "grpc"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2507,11 +2852,9 @@ def test_delete_trigger(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_trigger), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_trigger), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.delete_trigger(request) # Establish that the underlying gRPC stub method was called. @@ -2529,30 +2872,31 @@ def test_delete_trigger_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = eventarc.DeleteTriggerRequest( - name='name_value', - etag='etag_value', + name="name_value", + etag="etag_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_trigger), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.delete_trigger), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_trigger(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.DeleteTriggerRequest( - name='name_value', - etag='etag_value', + name="name_value", + etag="etag_value", ) + def test_delete_trigger_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2571,7 +2915,9 @@ def test_delete_trigger_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.delete_trigger] = mock_rpc request = {} client.delete_trigger(request) @@ -2590,8 +2936,11 @@ def test_delete_trigger_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_delete_trigger_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2605,12 +2954,17 @@ async def test_delete_trigger_async_use_cached_wrapped_rpc(transport: str = "grp wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.delete_trigger in client._client._transport._wrapped_methods + assert ( + client._client._transport.delete_trigger + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_trigger] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.delete_trigger + ] = mock_rpc request = {} await client.delete_trigger(request) @@ -2629,8 +2983,11 @@ async def test_delete_trigger_async_use_cached_wrapped_rpc(transport: str = "grp assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.DeleteTriggerRequest): +async def test_delete_trigger_async( + transport: str = "grpc_asyncio", request_type=eventarc.DeleteTriggerRequest +): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2641,12 +2998,10 @@ async def 
test_delete_trigger_async(transport: str = 'grpc_asyncio', request_typ request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_trigger), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_trigger), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.delete_trigger(request) @@ -2664,6 +3019,7 @@ async def test_delete_trigger_async(transport: str = 'grpc_asyncio', request_typ async def test_delete_trigger_async_from_dict(): await test_delete_trigger_async(request_type=dict) + def test_delete_trigger_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2673,13 +3029,11 @@ def test_delete_trigger_field_headers(): # a field header. Set these to a non-empty value. request = eventarc.DeleteTriggerRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_trigger), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.delete_trigger), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.delete_trigger(request) # Establish that the underlying gRPC stub method was called. @@ -2690,9 +3044,9 @@ def test_delete_trigger_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2705,13 +3059,13 @@ async def test_delete_trigger_field_headers_async(): # a field header. Set these to a non-empty value. request = eventarc.DeleteTriggerRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + with mock.patch.object(type(client.transport.delete_trigger), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.delete_trigger(request) # Establish that the underlying gRPC stub method was called. @@ -2722,9 +3076,9 @@ async def test_delete_trigger_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_delete_trigger_flattened(): @@ -2733,15 +3087,13 @@ def test_delete_trigger_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_trigger), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_trigger), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.delete_trigger( - name='name_value', + name="name_value", allow_missing=True, ) @@ -2750,7 +3102,7 @@ def test_delete_trigger_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].allow_missing mock_val = True @@ -2767,10 +3119,11 @@ def test_delete_trigger_flattened_error(): with pytest.raises(ValueError): client.delete_trigger( eventarc.DeleteTriggerRequest(), - name='name_value', + name="name_value", allow_missing=True, ) + @pytest.mark.asyncio async def test_delete_trigger_flattened_async(): client = EventarcAsyncClient( @@ -2778,19 +3131,17 @@ async def test_delete_trigger_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_trigger), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_trigger), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.delete_trigger( - name='name_value', + name="name_value", allow_missing=True, ) @@ -2799,12 +3150,13 @@ async def test_delete_trigger_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].allow_missing mock_val = True assert arg == mock_val + @pytest.mark.asyncio async def test_delete_trigger_flattened_error_async(): client = EventarcAsyncClient( @@ -2816,16 +3168,19 @@ async def test_delete_trigger_flattened_error_async(): with pytest.raises(ValueError): await client.delete_trigger( eventarc.DeleteTriggerRequest(), - name='name_value', + name="name_value", allow_missing=True, ) -@pytest.mark.parametrize("request_type", [ - eventarc.GetChannelRequest, - dict, -]) -def test_get_channel(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + eventarc.GetChannelRequest, + dict, + ], +) +def test_get_channel(request_type, transport: str = "grpc"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2836,18 +3191,16 @@ def test_get_channel(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_channel), - '__call__') as call: + with mock.patch.object(type(client.transport.get_channel), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = channel.Channel( - name='name_value', - uid='uid_value', - provider='provider_value', + name="name_value", + uid="uid_value", + provider="provider_value", state=channel.Channel.State.PENDING, - activation_token='activation_token_value', - crypto_key_name='crypto_key_name_value', - pubsub_topic='pubsub_topic_value', + activation_token="activation_token_value", + crypto_key_name="crypto_key_name_value", + pubsub_topic="pubsub_topic_value", ) response = client.get_channel(request) @@ -2859,12 +3212,12 @@ def test_get_channel(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, channel.Channel) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.provider == 'provider_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.provider == "provider_value" assert response.state == channel.Channel.State.PENDING - assert response.activation_token == 'activation_token_value' - assert response.crypto_key_name == 'crypto_key_name_value' + assert response.activation_token == "activation_token_value" + assert response.crypto_key_name == "crypto_key_name_value" def test_get_channel_non_empty_request_with_auto_populated_field(): @@ -2872,28 +3225,29 @@ def test_get_channel_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = eventarc.GetChannelRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_channel), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.get_channel), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_channel(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.GetChannelRequest( - name='name_value', + name="name_value", ) + def test_get_channel_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2912,7 +3266,9 @@ def test_get_channel_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.get_channel] = mock_rpc request = {} client.get_channel(request) @@ -2926,8 +3282,11 @@ def test_get_channel_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_channel_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_get_channel_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2941,12 +3300,17 @@ async def test_get_channel_async_use_cached_wrapped_rpc(transport: str = "grpc_a wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_channel in client._client._transport._wrapped_methods + assert ( + client._client._transport.get_channel + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_channel] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.get_channel + ] = mock_rpc request = {} await client.get_channel(request) @@ -2960,8 +3324,11 @@ async def test_get_channel_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetChannelRequest): +async def test_get_channel_async( + transport: str = "grpc_asyncio", request_type=eventarc.GetChannelRequest +): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2972,18 +3339,18 @@ async def test_get_channel_async(transport: str = 'grpc_asyncio', request_type=e 
request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_channel), - '__call__') as call: + with mock.patch.object(type(client.transport.get_channel), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(channel.Channel( - name='name_value', - uid='uid_value', - provider='provider_value', - state=channel.Channel.State.PENDING, - activation_token='activation_token_value', - crypto_key_name='crypto_key_name_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + channel.Channel( + name="name_value", + uid="uid_value", + provider="provider_value", + state=channel.Channel.State.PENDING, + activation_token="activation_token_value", + crypto_key_name="crypto_key_name_value", + ) + ) response = await client.get_channel(request) # Establish that the underlying gRPC stub method was called. @@ -2994,18 +3361,19 @@ async def test_get_channel_async(transport: str = 'grpc_asyncio', request_type=e # Establish that the response is the type that we expect. 
assert isinstance(response, channel.Channel) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.provider == 'provider_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.provider == "provider_value" assert response.state == channel.Channel.State.PENDING - assert response.activation_token == 'activation_token_value' - assert response.crypto_key_name == 'crypto_key_name_value' + assert response.activation_token == "activation_token_value" + assert response.crypto_key_name == "crypto_key_name_value" @pytest.mark.asyncio async def test_get_channel_async_from_dict(): await test_get_channel_async(request_type=dict) + def test_get_channel_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3015,12 +3383,10 @@ def test_get_channel_field_headers(): # a field header. Set these to a non-empty value. request = eventarc.GetChannelRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_channel), - '__call__') as call: + with mock.patch.object(type(client.transport.get_channel), "__call__") as call: call.return_value = channel.Channel() client.get_channel(request) @@ -3032,9 +3398,9 @@ def test_get_channel_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -3047,12 +3413,10 @@ async def test_get_channel_field_headers_async(): # a field header. Set these to a non-empty value. request = eventarc.GetChannelRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_channel), - '__call__') as call: + with mock.patch.object(type(client.transport.get_channel), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel.Channel()) await client.get_channel(request) @@ -3064,9 +3428,9 @@ async def test_get_channel_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_get_channel_flattened(): @@ -3075,15 +3439,13 @@ def test_get_channel_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_channel), - '__call__') as call: + with mock.patch.object(type(client.transport.get_channel), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = channel.Channel() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_channel( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -3091,7 +3453,7 @@ def test_get_channel_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -3105,9 +3467,10 @@ def test_get_channel_flattened_error(): with pytest.raises(ValueError): client.get_channel( eventarc.GetChannelRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_get_channel_flattened_async(): client = EventarcAsyncClient( @@ -3115,9 +3478,7 @@ async def test_get_channel_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_channel), - '__call__') as call: + with mock.patch.object(type(client.transport.get_channel), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = channel.Channel() @@ -3125,7 +3486,7 @@ async def test_get_channel_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_channel( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -3133,9 +3494,10 @@ async def test_get_channel_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_get_channel_flattened_error_async(): client = EventarcAsyncClient( @@ -3147,15 +3509,18 @@ async def test_get_channel_flattened_error_async(): with pytest.raises(ValueError): await client.get_channel( eventarc.GetChannelRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - eventarc.ListChannelsRequest, - dict, -]) -def test_list_channels(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + eventarc.ListChannelsRequest, + dict, + ], +) +def test_list_channels(request_type, transport: str = "grpc"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3166,13 +3531,11 @@ def test_list_channels(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_channels), - '__call__') as call: + with mock.patch.object(type(client.transport.list_channels), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = eventarc.ListChannelsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) response = client.list_channels(request) @@ -3184,8 +3547,8 @@ def test_list_channels(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListChannelsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] def test_list_channels_non_empty_request_with_auto_populated_field(): @@ -3193,32 +3556,33 @@ def test_list_channels_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = eventarc.ListChannelsRequest( - parent='parent_value', - page_token='page_token_value', - order_by='order_by_value', + parent="parent_value", + page_token="page_token_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_channels), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.list_channels), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_channels(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.ListChannelsRequest( - parent='parent_value', - page_token='page_token_value', - order_by='order_by_value', + parent="parent_value", + page_token="page_token_value", + order_by="order_by_value", ) + def test_list_channels_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3237,7 +3601,9 @@ def test_list_channels_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.list_channels] = mock_rpc request = {} client.list_channels(request) @@ -3251,8 +3617,11 @@ def test_list_channels_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_channels_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_channels_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -3266,12 +3635,17 @@ async def test_list_channels_async_use_cached_wrapped_rpc(transport: str = "grpc wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_channels in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_channels + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - 
client._client._transport._wrapped_methods[client._client._transport.list_channels] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_channels + ] = mock_rpc request = {} await client.list_channels(request) @@ -3285,8 +3659,11 @@ async def test_list_channels_async_use_cached_wrapped_rpc(transport: str = "grpc assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_channels_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListChannelsRequest): +async def test_list_channels_async( + transport: str = "grpc_asyncio", request_type=eventarc.ListChannelsRequest +): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -3297,14 +3674,14 @@ async def test_list_channels_async(transport: str = 'grpc_asyncio', request_type request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_channels), - '__call__') as call: + with mock.patch.object(type(client.transport.list_channels), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + eventarc.ListChannelsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) response = await client.list_channels(request) # Establish that the underlying gRPC stub method was called. @@ -3315,14 +3692,15 @@ async def test_list_channels_async(transport: str = 'grpc_asyncio', request_type # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListChannelsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio async def test_list_channels_async_from_dict(): await test_list_channels_async(request_type=dict) + def test_list_channels_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3332,12 +3710,10 @@ def test_list_channels_field_headers(): # a field header. Set these to a non-empty value. request = eventarc.ListChannelsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_channels), - '__call__') as call: + with mock.patch.object(type(client.transport.list_channels), "__call__") as call: call.return_value = eventarc.ListChannelsResponse() client.list_channels(request) @@ -3349,9 +3725,9 @@ def test_list_channels_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -3364,13 +3740,13 @@ async def test_list_channels_field_headers_async(): # a field header. Set these to a non-empty value. request = eventarc.ListChannelsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_channels), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelsResponse()) + with mock.patch.object(type(client.transport.list_channels), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + eventarc.ListChannelsResponse() + ) await client.list_channels(request) # Establish that the underlying gRPC stub method was called. @@ -3381,9 +3757,9 @@ async def test_list_channels_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_list_channels_flattened(): @@ -3392,15 +3768,13 @@ def test_list_channels_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_channels), - '__call__') as call: + with mock.patch.object(type(client.transport.list_channels), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = eventarc.ListChannelsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.list_channels( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -3408,7 +3782,7 @@ def test_list_channels_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -3422,9 +3796,10 @@ def test_list_channels_flattened_error(): with pytest.raises(ValueError): client.list_channels( eventarc.ListChannelsRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test_list_channels_flattened_async(): client = EventarcAsyncClient( @@ -3432,17 +3807,17 @@ async def test_list_channels_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_channels), - '__call__') as call: + with mock.patch.object(type(client.transport.list_channels), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = eventarc.ListChannelsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelsResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + eventarc.ListChannelsResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.list_channels( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -3450,9 +3825,10 @@ async def test_list_channels_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test_list_channels_flattened_error_async(): client = EventarcAsyncClient( @@ -3464,7 +3840,7 @@ async def test_list_channels_flattened_error_async(): with pytest.raises(ValueError): await client.list_channels( eventarc.ListChannelsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -3475,9 +3851,7 @@ def test_list_channels_pager(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_channels), - '__call__') as call: + with mock.patch.object(type(client.transport.list_channels), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( eventarc.ListChannelsResponse( @@ -3486,17 +3860,17 @@ def test_list_channels_pager(transport_name: str = "grpc"): channel.Channel(), channel.Channel(), ], - next_page_token='abc', + next_page_token="abc", ), eventarc.ListChannelsResponse( channels=[], - next_page_token='def', + next_page_token="def", ), eventarc.ListChannelsResponse( channels=[ channel.Channel(), ], - next_page_token='ghi', + next_page_token="ghi", ), eventarc.ListChannelsResponse( channels=[ @@ -3511,9 +3885,7 @@ def test_list_channels_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_channels(request={}, retry=retry, timeout=timeout) @@ -3523,8 +3895,9 @@ def test_list_channels_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, channel.Channel) - for i in results) + assert all(isinstance(i, channel.Channel) for i in results) + + def test_list_channels_pages(transport_name: str = "grpc"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3532,9 +3905,7 @@ def test_list_channels_pages(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_channels), - '__call__') as call: + with mock.patch.object(type(client.transport.list_channels), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( eventarc.ListChannelsResponse( @@ -3543,17 +3914,17 @@ def test_list_channels_pages(transport_name: str = "grpc"): channel.Channel(), channel.Channel(), ], - next_page_token='abc', + next_page_token="abc", ), eventarc.ListChannelsResponse( channels=[], - next_page_token='def', + next_page_token="def", ), eventarc.ListChannelsResponse( channels=[ channel.Channel(), ], - next_page_token='ghi', + next_page_token="ghi", ), eventarc.ListChannelsResponse( channels=[ @@ -3564,9 +3935,10 @@ def test_list_channels_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.list_channels(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_list_channels_async_pager(): client = EventarcAsyncClient( @@ -3575,8 +3947,8 @@ async def test_list_channels_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_channels), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_channels), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( eventarc.ListChannelsResponse( @@ -3585,17 +3957,17 @@ async def test_list_channels_async_pager(): channel.Channel(), channel.Channel(), ], - next_page_token='abc', + next_page_token="abc", ), eventarc.ListChannelsResponse( channels=[], - next_page_token='def', + next_page_token="def", ), eventarc.ListChannelsResponse( channels=[ channel.Channel(), ], - next_page_token='ghi', + next_page_token="ghi", ), eventarc.ListChannelsResponse( channels=[ @@ -3605,15 +3977,16 @@ async def test_list_channels_async_pager(): ), RuntimeError, ) - async_pager = await client.list_channels(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.list_channels( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, channel.Channel) - for i in responses) + assert all(isinstance(i, channel.Channel) for i in responses) @pytest.mark.asyncio @@ -3624,8 +3997,8 @@ async def test_list_channels_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_channels), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_channels), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( eventarc.ListChannelsResponse( @@ -3634,17 +4007,17 @@ async def test_list_channels_async_pages(): channel.Channel(), channel.Channel(), ], - next_page_token='abc', + next_page_token="abc", ), eventarc.ListChannelsResponse( channels=[], - next_page_token='def', + next_page_token="def", ), eventarc.ListChannelsResponse( channels=[ channel.Channel(), ], - next_page_token='ghi', + next_page_token="ghi", ), eventarc.ListChannelsResponse( channels=[ @@ -3657,18 +4030,22 @@ async def test_list_channels_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.list_channels(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - eventarc.CreateChannelRequest, - dict, -]) -def test_create_channel(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + eventarc.CreateChannelRequest, + dict, + ], +) +def test_create_channel(request_type, transport: str = "grpc"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3679,11 +4056,9 @@ def test_create_channel(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_channel_), - '__call__') as call: + with mock.patch.object(type(client.transport.create_channel_), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.create_channel(request) # Establish that the underlying gRPC stub method was called. @@ -3701,30 +4076,31 @@ def test_create_channel_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = eventarc.CreateChannelRequest( - parent='parent_value', - channel_id='channel_id_value', + parent="parent_value", + channel_id="channel_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_channel_), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.create_channel_), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_channel(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.CreateChannelRequest( - parent='parent_value', - channel_id='channel_id_value', + parent="parent_value", + channel_id="channel_id_value", ) + def test_create_channel_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3743,7 +4119,9 @@ def test_create_channel_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.create_channel_] = mock_rpc request = {} client.create_channel(request) @@ -3762,8 +4140,11 @@ def test_create_channel_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_channel_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_create_channel_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -3777,12 +4158,17 @@ async def test_create_channel_async_use_cached_wrapped_rpc(transport: str = "grp wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.create_channel_ in client._client._transport._wrapped_methods + assert ( + client._client._transport.create_channel_ + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_channel_] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.create_channel_ + ] = mock_rpc request = {} await client.create_channel(request) @@ -3801,8 +4187,11 @@ async def test_create_channel_async_use_cached_wrapped_rpc(transport: str = "grp assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.CreateChannelRequest): +async def test_create_channel_async( + transport: str = "grpc_asyncio", request_type=eventarc.CreateChannelRequest +): client = EventarcAsyncClient( 
credentials=async_anonymous_credentials(), transport=transport, @@ -3813,12 +4202,10 @@ async def test_create_channel_async(transport: str = 'grpc_asyncio', request_typ request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_channel_), - '__call__') as call: + with mock.patch.object(type(client.transport.create_channel_), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.create_channel(request) @@ -3836,6 +4223,7 @@ async def test_create_channel_async(transport: str = 'grpc_asyncio', request_typ async def test_create_channel_async_from_dict(): await test_create_channel_async(request_type=dict) + def test_create_channel_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3845,13 +4233,11 @@ def test_create_channel_field_headers(): # a field header. Set these to a non-empty value. request = eventarc.CreateChannelRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_channel_), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.create_channel_), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.create_channel(request) # Establish that the underlying gRPC stub method was called. @@ -3862,9 +4248,9 @@ def test_create_channel_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -3877,13 +4263,13 @@ async def test_create_channel_field_headers_async(): # a field header. Set these to a non-empty value. request = eventarc.CreateChannelRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_channel_), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + with mock.patch.object(type(client.transport.create_channel_), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.create_channel(request) # Establish that the underlying gRPC stub method was called. @@ -3894,9 +4280,9 @@ async def test_create_channel_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_create_channel_flattened(): @@ -3905,17 +4291,15 @@ def test_create_channel_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_channel_), - '__call__') as call: + with mock.patch.object(type(client.transport.create_channel_), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.create_channel( - parent='parent_value', - channel=gce_channel.Channel(name='name_value'), - channel_id='channel_id_value', + parent="parent_value", + channel=gce_channel.Channel(name="name_value"), + channel_id="channel_id_value", ) # Establish that the underlying call was made with the expected @@ -3923,13 +4307,13 @@ def test_create_channel_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].channel - mock_val = gce_channel.Channel(name='name_value') + mock_val = gce_channel.Channel(name="name_value") assert arg == mock_val arg = args[0].channel_id - mock_val = 'channel_id_value' + mock_val = "channel_id_value" assert arg == mock_val @@ -3943,11 +4327,12 @@ def test_create_channel_flattened_error(): with pytest.raises(ValueError): client.create_channel( eventarc.CreateChannelRequest(), - parent='parent_value', - channel=gce_channel.Channel(name='name_value'), - channel_id='channel_id_value', + parent="parent_value", + channel=gce_channel.Channel(name="name_value"), + channel_id="channel_id_value", ) + @pytest.mark.asyncio async def test_create_channel_flattened_async(): client = EventarcAsyncClient( @@ -3955,21 +4340,19 @@ async def test_create_channel_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_channel_), - '__call__') as call: + with mock.patch.object(type(client.transport.create_channel_), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_channel( - parent='parent_value', - channel=gce_channel.Channel(name='name_value'), - channel_id='channel_id_value', + parent="parent_value", + channel=gce_channel.Channel(name="name_value"), + channel_id="channel_id_value", ) # Establish that the underlying call was made with the expected @@ -3977,15 +4360,16 @@ async def test_create_channel_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].channel - mock_val = gce_channel.Channel(name='name_value') + mock_val = gce_channel.Channel(name="name_value") assert arg == mock_val arg = args[0].channel_id - mock_val = 'channel_id_value' + mock_val = "channel_id_value" assert arg == mock_val + @pytest.mark.asyncio async def test_create_channel_flattened_error_async(): client = EventarcAsyncClient( @@ -3997,17 +4381,20 @@ async def test_create_channel_flattened_error_async(): with pytest.raises(ValueError): await client.create_channel( eventarc.CreateChannelRequest(), - parent='parent_value', - channel=gce_channel.Channel(name='name_value'), - channel_id='channel_id_value', + parent="parent_value", + channel=gce_channel.Channel(name="name_value"), + channel_id="channel_id_value", ) -@pytest.mark.parametrize("request_type", [ - eventarc.UpdateChannelRequest, - dict, -]) -def test_update_channel(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + eventarc.UpdateChannelRequest, + dict, + ], +) +def 
test_update_channel(request_type, transport: str = "grpc"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4018,11 +4405,9 @@ def test_update_channel(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_channel), - '__call__') as call: + with mock.patch.object(type(client.transport.update_channel), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.update_channel(request) # Establish that the underlying gRPC stub method was called. @@ -4040,25 +4425,24 @@ def test_update_channel_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = eventarc.UpdateChannelRequest( - ) + request = eventarc.UpdateChannelRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_channel), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.update_channel), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.update_channel(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.UpdateChannelRequest( - ) + assert args[0] == eventarc.UpdateChannelRequest() + def test_update_channel_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4078,7 +4462,9 @@ def test_update_channel_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.update_channel] = mock_rpc request = {} client.update_channel(request) @@ -4097,8 +4483,11 @@ def test_update_channel_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_channel_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_update_channel_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -4112,12 +4501,17 @@ async def test_update_channel_async_use_cached_wrapped_rpc(transport: str = "grp wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.update_channel in client._client._transport._wrapped_methods + assert ( + client._client._transport.update_channel + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_channel] = mock_rpc + client._client._transport._wrapped_methods[ + 
client._client._transport.update_channel + ] = mock_rpc request = {} await client.update_channel(request) @@ -4136,8 +4530,11 @@ async def test_update_channel_async_use_cached_wrapped_rpc(transport: str = "grp assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.UpdateChannelRequest): +async def test_update_channel_async( + transport: str = "grpc_asyncio", request_type=eventarc.UpdateChannelRequest +): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -4148,12 +4545,10 @@ async def test_update_channel_async(transport: str = 'grpc_asyncio', request_typ request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_channel), - '__call__') as call: + with mock.patch.object(type(client.transport.update_channel), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.update_channel(request) @@ -4171,6 +4566,7 @@ async def test_update_channel_async(transport: str = 'grpc_asyncio', request_typ async def test_update_channel_async_from_dict(): await test_update_channel_async(request_type=dict) + def test_update_channel_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4180,13 +4576,11 @@ def test_update_channel_field_headers(): # a field header. Set these to a non-empty value. request = eventarc.UpdateChannelRequest() - request.channel.name = 'name_value' + request.channel.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_channel), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.update_channel), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.update_channel(request) # Establish that the underlying gRPC stub method was called. @@ -4197,9 +4591,9 @@ def test_update_channel_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'channel.name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "channel.name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -4212,13 +4606,13 @@ async def test_update_channel_field_headers_async(): # a field header. Set these to a non-empty value. request = eventarc.UpdateChannelRequest() - request.channel.name = 'name_value' + request.channel.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_channel), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + with mock.patch.object(type(client.transport.update_channel), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.update_channel(request) # Establish that the underlying gRPC stub method was called. @@ -4229,9 +4623,9 @@ async def test_update_channel_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'channel.name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "channel.name=name_value", + ) in kw["metadata"] def test_update_channel_flattened(): @@ -4240,16 +4634,14 @@ def test_update_channel_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_channel), - '__call__') as call: + with mock.patch.object(type(client.transport.update_channel), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_channel( - channel=gce_channel.Channel(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + channel=gce_channel.Channel(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -4257,10 +4649,10 @@ def test_update_channel_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].channel - mock_val = gce_channel.Channel(name='name_value') + mock_val = gce_channel.Channel(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @@ -4274,10 +4666,11 @@ def test_update_channel_flattened_error(): with pytest.raises(ValueError): client.update_channel( eventarc.UpdateChannelRequest(), - channel=gce_channel.Channel(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + channel=gce_channel.Channel(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + 
@pytest.mark.asyncio async def test_update_channel_flattened_async(): client = EventarcAsyncClient( @@ -4285,20 +4678,18 @@ async def test_update_channel_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_channel), - '__call__') as call: + with mock.patch.object(type(client.transport.update_channel), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_channel( - channel=gce_channel.Channel(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + channel=gce_channel.Channel(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -4306,12 +4697,13 @@ async def test_update_channel_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].channel - mock_val = gce_channel.Channel(name='name_value') + mock_val = gce_channel.Channel(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val + @pytest.mark.asyncio async def test_update_channel_flattened_error_async(): client = EventarcAsyncClient( @@ -4323,16 +4715,19 @@ async def test_update_channel_flattened_error_async(): with pytest.raises(ValueError): await client.update_channel( eventarc.UpdateChannelRequest(), - channel=gce_channel.Channel(name='name_value'), - 
update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + channel=gce_channel.Channel(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -@pytest.mark.parametrize("request_type", [ - eventarc.DeleteChannelRequest, - dict, -]) -def test_delete_channel(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + eventarc.DeleteChannelRequest, + dict, + ], +) +def test_delete_channel(request_type, transport: str = "grpc"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4343,11 +4738,9 @@ def test_delete_channel(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_channel), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_channel), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.delete_channel(request) # Establish that the underlying gRPC stub method was called. @@ -4365,28 +4758,29 @@ def test_delete_channel_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = eventarc.DeleteChannelRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_channel), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.delete_channel), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_channel(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.DeleteChannelRequest( - name='name_value', + name="name_value", ) + def test_delete_channel_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -4405,7 +4799,9 @@ def test_delete_channel_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.delete_channel] = mock_rpc request = {} client.delete_channel(request) @@ -4424,8 +4820,11 @@ def test_delete_channel_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_channel_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_delete_channel_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -4439,12 +4838,17 @@ async def test_delete_channel_async_use_cached_wrapped_rpc(transport: str = "grp wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.delete_channel in client._client._transport._wrapped_methods + assert ( + client._client._transport.delete_channel + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_channel] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.delete_channel + ] = mock_rpc request = {} await client.delete_channel(request) @@ -4463,8 +4867,11 @@ async def test_delete_channel_async_use_cached_wrapped_rpc(transport: str = "grp assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.DeleteChannelRequest): +async def test_delete_channel_async( + transport: str = "grpc_asyncio", request_type=eventarc.DeleteChannelRequest +): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -4475,12 +4882,10 @@ async def 
test_delete_channel_async(transport: str = 'grpc_asyncio', request_typ request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_channel), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_channel), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.delete_channel(request) @@ -4498,6 +4903,7 @@ async def test_delete_channel_async(transport: str = 'grpc_asyncio', request_typ async def test_delete_channel_async_from_dict(): await test_delete_channel_async(request_type=dict) + def test_delete_channel_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4507,13 +4913,11 @@ def test_delete_channel_field_headers(): # a field header. Set these to a non-empty value. request = eventarc.DeleteChannelRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_channel), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.delete_channel), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.delete_channel(request) # Establish that the underlying gRPC stub method was called. @@ -4524,9 +4928,9 @@ def test_delete_channel_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -4539,13 +4943,13 @@ async def test_delete_channel_field_headers_async(): # a field header. Set these to a non-empty value. request = eventarc.DeleteChannelRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_channel), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + with mock.patch.object(type(client.transport.delete_channel), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.delete_channel(request) # Establish that the underlying gRPC stub method was called. @@ -4556,9 +4960,9 @@ async def test_delete_channel_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_delete_channel_flattened(): @@ -4567,15 +4971,13 @@ def test_delete_channel_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_channel), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_channel), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.delete_channel( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -4583,7 +4985,7 @@ def test_delete_channel_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -4597,9 +4999,10 @@ def test_delete_channel_flattened_error(): with pytest.raises(ValueError): client.delete_channel( eventarc.DeleteChannelRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_delete_channel_flattened_async(): client = EventarcAsyncClient( @@ -4607,19 +5010,17 @@ async def test_delete_channel_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_channel), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_channel), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.delete_channel( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -4627,9 +5028,10 @@ async def test_delete_channel_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_delete_channel_flattened_error_async(): client = EventarcAsyncClient( @@ -4641,15 +5043,18 @@ async def test_delete_channel_flattened_error_async(): with pytest.raises(ValueError): await client.delete_channel( eventarc.DeleteChannelRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - eventarc.GetProviderRequest, - dict, -]) -def test_get_provider(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + eventarc.GetProviderRequest, + dict, + ], +) +def test_get_provider(request_type, transport: str = "grpc"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4660,13 +5065,11 @@ def test_get_provider(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_provider), - '__call__') as call: + with mock.patch.object(type(client.transport.get_provider), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = discovery.Provider( - name='name_value', - display_name='display_name_value', + name="name_value", + display_name="display_name_value", ) response = client.get_provider(request) @@ -4678,8 +5081,8 @@ def test_get_provider(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, discovery.Provider) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' + assert response.name == "name_value" + assert response.display_name == "display_name_value" def test_get_provider_non_empty_request_with_auto_populated_field(): @@ -4687,28 +5090,29 @@ def test_get_provider_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = eventarc.GetProviderRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_provider), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.get_provider), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_provider(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.GetProviderRequest( - name='name_value', + name="name_value", ) + def test_get_provider_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -4727,7 +5131,9 @@ def test_get_provider_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.get_provider] = mock_rpc request = {} client.get_provider(request) @@ -4741,8 +5147,11 @@ def test_get_provider_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_provider_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_get_provider_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -4756,12 +5165,17 @@ async def test_get_provider_async_use_cached_wrapped_rpc(transport: str = "grpc_ wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_provider in client._client._transport._wrapped_methods + assert ( + client._client._transport.get_provider + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_provider] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.get_provider + ] = mock_rpc request = {} await client.get_provider(request) @@ -4775,8 +5189,11 @@ async def test_get_provider_async_use_cached_wrapped_rpc(transport: str = "grpc_ assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_provider_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetProviderRequest): +async def test_get_provider_async( + transport: str = "grpc_asyncio", request_type=eventarc.GetProviderRequest +): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -4787,14 +5204,14 @@ async def test_get_provider_async(transport: str = 'grpc_asyncio', 
request_type= request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_provider), - '__call__') as call: + with mock.patch.object(type(client.transport.get_provider), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(discovery.Provider( - name='name_value', - display_name='display_name_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + discovery.Provider( + name="name_value", + display_name="display_name_value", + ) + ) response = await client.get_provider(request) # Establish that the underlying gRPC stub method was called. @@ -4805,14 +5222,15 @@ async def test_get_provider_async(transport: str = 'grpc_asyncio', request_type= # Establish that the response is the type that we expect. assert isinstance(response, discovery.Provider) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' + assert response.name == "name_value" + assert response.display_name == "display_name_value" @pytest.mark.asyncio async def test_get_provider_async_from_dict(): await test_get_provider_async(request_type=dict) + def test_get_provider_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4822,12 +5240,10 @@ def test_get_provider_field_headers(): # a field header. Set these to a non-empty value. request = eventarc.GetProviderRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_provider), - '__call__') as call: + with mock.patch.object(type(client.transport.get_provider), "__call__") as call: call.return_value = discovery.Provider() client.get_provider(request) @@ -4839,9 +5255,9 @@ def test_get_provider_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -4854,12 +5270,10 @@ async def test_get_provider_field_headers_async(): # a field header. Set these to a non-empty value. request = eventarc.GetProviderRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_provider), - '__call__') as call: + with mock.patch.object(type(client.transport.get_provider), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(discovery.Provider()) await client.get_provider(request) @@ -4871,9 +5285,9 @@ async def test_get_provider_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_get_provider_flattened(): @@ -4882,15 +5296,13 @@ def test_get_provider_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_provider), - '__call__') as call: + with mock.patch.object(type(client.transport.get_provider), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = discovery.Provider() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.get_provider( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -4898,7 +5310,7 @@ def test_get_provider_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -4912,9 +5324,10 @@ def test_get_provider_flattened_error(): with pytest.raises(ValueError): client.get_provider( eventarc.GetProviderRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_get_provider_flattened_async(): client = EventarcAsyncClient( @@ -4922,9 +5335,7 @@ async def test_get_provider_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_provider), - '__call__') as call: + with mock.patch.object(type(client.transport.get_provider), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = discovery.Provider() @@ -4932,7 +5343,7 @@ async def test_get_provider_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.get_provider( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -4940,9 +5351,10 @@ async def test_get_provider_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_get_provider_flattened_error_async(): client = EventarcAsyncClient( @@ -4954,15 +5366,18 @@ async def test_get_provider_flattened_error_async(): with pytest.raises(ValueError): await client.get_provider( eventarc.GetProviderRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - eventarc.ListProvidersRequest, - dict, -]) -def test_list_providers(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + eventarc.ListProvidersRequest, + dict, + ], +) +def test_list_providers(request_type, transport: str = "grpc"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4973,13 +5388,11 @@ def test_list_providers(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_providers), - '__call__') as call: + with mock.patch.object(type(client.transport.list_providers), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = eventarc.ListProvidersResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) response = client.list_providers(request) @@ -4991,8 +5404,8 @@ def test_list_providers(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListProvidersPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] def test_list_providers_non_empty_request_with_auto_populated_field(): @@ -5000,34 +5413,35 @@ def test_list_providers_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = eventarc.ListProvidersRequest( - parent='parent_value', - page_token='page_token_value', - order_by='order_by_value', - filter='filter_value', + parent="parent_value", + page_token="page_token_value", + order_by="order_by_value", + filter="filter_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_providers), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.list_providers), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_providers(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.ListProvidersRequest( - parent='parent_value', - page_token='page_token_value', - order_by='order_by_value', - filter='filter_value', + parent="parent_value", + page_token="page_token_value", + order_by="order_by_value", + filter="filter_value", ) + def test_list_providers_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5046,7 +5460,9 @@ def test_list_providers_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.list_providers] = mock_rpc request = {} client.list_providers(request) @@ -5060,8 +5476,11 @@ def test_list_providers_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_providers_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_providers_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -5075,12 +5494,17 @@ async def test_list_providers_async_use_cached_wrapped_rpc(transport: str = "grp wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_providers in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_providers + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = 
mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_providers] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_providers + ] = mock_rpc request = {} await client.list_providers(request) @@ -5094,8 +5518,11 @@ async def test_list_providers_async_use_cached_wrapped_rpc(transport: str = "grp assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_providers_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListProvidersRequest): +async def test_list_providers_async( + transport: str = "grpc_asyncio", request_type=eventarc.ListProvidersRequest +): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -5106,14 +5533,14 @@ async def test_list_providers_async(transport: str = 'grpc_asyncio', request_typ request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_providers), - '__call__') as call: + with mock.patch.object(type(client.transport.list_providers), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListProvidersResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + eventarc.ListProvidersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) response = await client.list_providers(request) # Establish that the underlying gRPC stub method was called. @@ -5124,14 +5551,15 @@ async def test_list_providers_async(transport: str = 'grpc_asyncio', request_typ # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListProvidersAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio async def test_list_providers_async_from_dict(): await test_list_providers_async(request_type=dict) + def test_list_providers_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5141,12 +5569,10 @@ def test_list_providers_field_headers(): # a field header. Set these to a non-empty value. request = eventarc.ListProvidersRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_providers), - '__call__') as call: + with mock.patch.object(type(client.transport.list_providers), "__call__") as call: call.return_value = eventarc.ListProvidersResponse() client.list_providers(request) @@ -5158,9 +5584,9 @@ def test_list_providers_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -5173,13 +5599,13 @@ async def test_list_providers_field_headers_async(): # a field header. Set these to a non-empty value. request = eventarc.ListProvidersRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_providers), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListProvidersResponse()) + with mock.patch.object(type(client.transport.list_providers), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + eventarc.ListProvidersResponse() + ) await client.list_providers(request) # Establish that the underlying gRPC stub method was called. @@ -5190,9 +5616,9 @@ async def test_list_providers_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_list_providers_flattened(): @@ -5201,15 +5627,13 @@ def test_list_providers_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_providers), - '__call__') as call: + with mock.patch.object(type(client.transport.list_providers), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = eventarc.ListProvidersResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.list_providers( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -5217,7 +5641,7 @@ def test_list_providers_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -5231,9 +5655,10 @@ def test_list_providers_flattened_error(): with pytest.raises(ValueError): client.list_providers( eventarc.ListProvidersRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test_list_providers_flattened_async(): client = EventarcAsyncClient( @@ -5241,17 +5666,17 @@ async def test_list_providers_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_providers), - '__call__') as call: + with mock.patch.object(type(client.transport.list_providers), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = eventarc.ListProvidersResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListProvidersResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + eventarc.ListProvidersResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.list_providers( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -5259,9 +5684,10 @@ async def test_list_providers_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test_list_providers_flattened_error_async(): client = EventarcAsyncClient( @@ -5273,7 +5699,7 @@ async def test_list_providers_flattened_error_async(): with pytest.raises(ValueError): await client.list_providers( eventarc.ListProvidersRequest(), - parent='parent_value', + parent="parent_value", ) @@ -5284,9 +5710,7 @@ def test_list_providers_pager(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_providers), - '__call__') as call: + with mock.patch.object(type(client.transport.list_providers), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( eventarc.ListProvidersResponse( @@ -5295,17 +5719,17 @@ def test_list_providers_pager(transport_name: str = "grpc"): discovery.Provider(), discovery.Provider(), ], - next_page_token='abc', + next_page_token="abc", ), eventarc.ListProvidersResponse( providers=[], - next_page_token='def', + next_page_token="def", ), eventarc.ListProvidersResponse( providers=[ discovery.Provider(), ], - next_page_token='ghi', + next_page_token="ghi", ), eventarc.ListProvidersResponse( providers=[ @@ -5320,9 +5744,7 @@ def test_list_providers_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_providers(request={}, retry=retry, timeout=timeout) @@ -5332,8 +5754,9 @@ def test_list_providers_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, discovery.Provider) - for i in results) + assert all(isinstance(i, discovery.Provider) for i in results) + + def test_list_providers_pages(transport_name: str = "grpc"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5341,9 +5764,7 @@ def test_list_providers_pages(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_providers), - '__call__') as call: + with mock.patch.object(type(client.transport.list_providers), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( eventarc.ListProvidersResponse( @@ -5352,17 +5773,17 @@ def test_list_providers_pages(transport_name: str = "grpc"): discovery.Provider(), discovery.Provider(), ], - next_page_token='abc', + next_page_token="abc", ), eventarc.ListProvidersResponse( providers=[], - next_page_token='def', + next_page_token="def", ), eventarc.ListProvidersResponse( providers=[ discovery.Provider(), ], - next_page_token='ghi', + next_page_token="ghi", ), eventarc.ListProvidersResponse( providers=[ @@ -5373,9 +5794,10 @@ def test_list_providers_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.list_providers(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_list_providers_async_pager(): client = EventarcAsyncClient( @@ -5384,8 +5806,8 @@ async def test_list_providers_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_providers), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_providers), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( eventarc.ListProvidersResponse( @@ -5394,17 +5816,17 @@ async def test_list_providers_async_pager(): discovery.Provider(), discovery.Provider(), ], - next_page_token='abc', + next_page_token="abc", ), eventarc.ListProvidersResponse( providers=[], - next_page_token='def', + next_page_token="def", ), eventarc.ListProvidersResponse( providers=[ discovery.Provider(), ], - next_page_token='ghi', + next_page_token="ghi", ), eventarc.ListProvidersResponse( providers=[ @@ -5414,15 +5836,16 @@ async def test_list_providers_async_pager(): ), RuntimeError, ) - async_pager = await client.list_providers(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.list_providers( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, discovery.Provider) - for i in responses) + assert all(isinstance(i, discovery.Provider) for i in responses) @pytest.mark.asyncio @@ -5433,8 +5856,8 @@ async def test_list_providers_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_providers), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_providers), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( eventarc.ListProvidersResponse( @@ -5443,17 +5866,17 @@ async def test_list_providers_async_pages(): discovery.Provider(), discovery.Provider(), ], - next_page_token='abc', + next_page_token="abc", ), eventarc.ListProvidersResponse( providers=[], - next_page_token='def', + next_page_token="def", ), eventarc.ListProvidersResponse( providers=[ discovery.Provider(), ], - next_page_token='ghi', + next_page_token="ghi", ), eventarc.ListProvidersResponse( providers=[ @@ -5466,18 +5889,22 @@ async def test_list_providers_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.list_providers(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - eventarc.GetChannelConnectionRequest, - dict, -]) -def test_get_channel_connection(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + eventarc.GetChannelConnectionRequest, + dict, + ], +) +def test_get_channel_connection(request_type, transport: str = "grpc"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5489,14 +5916,14 @@ def test_get_channel_connection(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_channel_connection), - '__call__') as call: + type(client.transport.get_channel_connection), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = channel_connection.ChannelConnection( - name='name_value', - uid='uid_value', - channel='channel_value', - activation_token='activation_token_value', + name="name_value", + uid="uid_value", + channel="channel_value", + activation_token="activation_token_value", ) response = client.get_channel_connection(request) @@ -5508,10 +5935,10 @@ def test_get_channel_connection(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, channel_connection.ChannelConnection) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.channel == 'channel_value' - assert response.activation_token == 'activation_token_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.channel == "channel_value" + assert response.activation_token == "activation_token_value" def test_get_channel_connection_non_empty_request_with_auto_populated_field(): @@ -5519,28 +5946,31 @@ def test_get_channel_connection_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = eventarc.GetChannelConnectionRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_channel_connection), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ type(client.transport.get_channel_connection), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_channel_connection(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.GetChannelConnectionRequest( - name='name_value', + name="name_value", ) + def test_get_channel_connection_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5555,12 +5985,19 @@ def test_get_channel_connection_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_channel_connection in client._transport._wrapped_methods + assert ( + client._transport.get_channel_connection + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_channel_connection] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.get_channel_connection] = ( + mock_rpc + ) request = {} client.get_channel_connection(request) @@ -5573,8 +6010,11 @@ def test_get_channel_connection_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_channel_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_get_channel_connection_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -5588,12 +6028,17 @@ async def test_get_channel_connection_async_use_cached_wrapped_rpc(transport: st wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_channel_connection in client._client._transport._wrapped_methods + assert ( + client._client._transport.get_channel_connection + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_channel_connection] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.get_channel_connection + ] = mock_rpc request = {} await client.get_channel_connection(request) @@ -5607,8 +6052,11 @@ async def test_get_channel_connection_async_use_cached_wrapped_rpc(transport: st assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_channel_connection_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetChannelConnectionRequest): +async def test_get_channel_connection_async( + transport: str = "grpc_asyncio", request_type=eventarc.GetChannelConnectionRequest +): client = EventarcAsyncClient( 
credentials=async_anonymous_credentials(), transport=transport, @@ -5620,15 +6068,17 @@ async def test_get_channel_connection_async(transport: str = 'grpc_asyncio', req # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_channel_connection), - '__call__') as call: + type(client.transport.get_channel_connection), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(channel_connection.ChannelConnection( - name='name_value', - uid='uid_value', - channel='channel_value', - activation_token='activation_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + channel_connection.ChannelConnection( + name="name_value", + uid="uid_value", + channel="channel_value", + activation_token="activation_token_value", + ) + ) response = await client.get_channel_connection(request) # Establish that the underlying gRPC stub method was called. @@ -5639,16 +6089,17 @@ async def test_get_channel_connection_async(transport: str = 'grpc_asyncio', req # Establish that the response is the type that we expect. assert isinstance(response, channel_connection.ChannelConnection) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.channel == 'channel_value' - assert response.activation_token == 'activation_token_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.channel == "channel_value" + assert response.activation_token == "activation_token_value" @pytest.mark.asyncio async def test_get_channel_connection_async_from_dict(): await test_get_channel_connection_async(request_type=dict) + def test_get_channel_connection_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5658,12 +6109,12 @@ def test_get_channel_connection_field_headers(): # a field header. Set these to a non-empty value. 
request = eventarc.GetChannelConnectionRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_channel_connection), - '__call__') as call: + type(client.transport.get_channel_connection), "__call__" + ) as call: call.return_value = channel_connection.ChannelConnection() client.get_channel_connection(request) @@ -5675,9 +6126,9 @@ def test_get_channel_connection_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -5690,13 +6141,15 @@ async def test_get_channel_connection_field_headers_async(): # a field header. Set these to a non-empty value. request = eventarc.GetChannelConnectionRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_channel_connection), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel_connection.ChannelConnection()) + type(client.transport.get_channel_connection), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + channel_connection.ChannelConnection() + ) await client.get_channel_connection(request) # Establish that the underlying gRPC stub method was called. @@ -5707,9 +6160,9 @@ async def test_get_channel_connection_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_get_channel_connection_flattened(): @@ -5719,14 +6172,14 @@ def test_get_channel_connection_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_channel_connection), - '__call__') as call: + type(client.transport.get_channel_connection), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = channel_connection.ChannelConnection() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_channel_connection( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -5734,7 +6187,7 @@ def test_get_channel_connection_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -5748,9 +6201,10 @@ def test_get_channel_connection_flattened_error(): with pytest.raises(ValueError): client.get_channel_connection( eventarc.GetChannelConnectionRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_get_channel_connection_flattened_async(): client = EventarcAsyncClient( @@ -5759,16 +6213,18 @@ async def test_get_channel_connection_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_channel_connection), - '__call__') as call: + type(client.transport.get_channel_connection), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = channel_connection.ChannelConnection() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel_connection.ChannelConnection()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + channel_connection.ChannelConnection() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_channel_connection( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -5776,9 +6232,10 @@ async def test_get_channel_connection_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_get_channel_connection_flattened_error_async(): client = EventarcAsyncClient( @@ -5790,15 +6247,18 @@ async def test_get_channel_connection_flattened_error_async(): with pytest.raises(ValueError): await client.get_channel_connection( eventarc.GetChannelConnectionRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - eventarc.ListChannelConnectionsRequest, - dict, -]) -def test_list_channel_connections(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + eventarc.ListChannelConnectionsRequest, + dict, + ], +) +def test_list_channel_connections(request_type, transport: str = "grpc"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5810,12 +6270,12 @@ def test_list_channel_connections(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_channel_connections), - '__call__') as call: + type(client.transport.list_channel_connections), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = eventarc.ListChannelConnectionsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) response = client.list_channel_connections(request) @@ -5827,8 +6287,8 @@ def test_list_channel_connections(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListChannelConnectionsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] def test_list_channel_connections_non_empty_request_with_auto_populated_field(): @@ -5836,30 +6296,33 @@ def test_list_channel_connections_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = eventarc.ListChannelConnectionsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_channel_connections), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.list_channel_connections), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_channel_connections(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.ListChannelConnectionsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) + def test_list_channel_connections_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5874,12 +6337,19 @@ def test_list_channel_connections_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_channel_connections in client._transport._wrapped_methods + assert ( + client._transport.list_channel_connections + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_channel_connections] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_channel_connections + ] = mock_rpc request = {} client.list_channel_connections(request) @@ -5892,8 +6362,11 @@ def test_list_channel_connections_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_channel_connections_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_channel_connections_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -5907,12 +6380,17 @@ async def test_list_channel_connections_async_use_cached_wrapped_rpc(transport: wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_channel_connections in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_channel_connections + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_channel_connections] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_channel_connections + ] = mock_rpc request = {} await client.list_channel_connections(request) @@ -5926,8 +6404,11 @@ async def test_list_channel_connections_async_use_cached_wrapped_rpc(transport: assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_channel_connections_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListChannelConnectionsRequest): +async def test_list_channel_connections_async( + transport: str = "grpc_asyncio", request_type=eventarc.ListChannelConnectionsRequest +): client = 
EventarcAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -5939,13 +6420,15 @@ async def test_list_channel_connections_async(transport: str = 'grpc_asyncio', r # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_channel_connections), - '__call__') as call: + type(client.transport.list_channel_connections), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelConnectionsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + eventarc.ListChannelConnectionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) response = await client.list_channel_connections(request) # Establish that the underlying gRPC stub method was called. @@ -5956,14 +6439,15 @@ async def test_list_channel_connections_async(transport: str = 'grpc_asyncio', r # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListChannelConnectionsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio async def test_list_channel_connections_async_from_dict(): await test_list_channel_connections_async(request_type=dict) + def test_list_channel_connections_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5973,12 +6457,12 @@ def test_list_channel_connections_field_headers(): # a field header. Set these to a non-empty value. 
request = eventarc.ListChannelConnectionsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_channel_connections), - '__call__') as call: + type(client.transport.list_channel_connections), "__call__" + ) as call: call.return_value = eventarc.ListChannelConnectionsResponse() client.list_channel_connections(request) @@ -5990,9 +6474,9 @@ def test_list_channel_connections_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -6005,13 +6489,15 @@ async def test_list_channel_connections_field_headers_async(): # a field header. Set these to a non-empty value. request = eventarc.ListChannelConnectionsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_channel_connections), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelConnectionsResponse()) + type(client.transport.list_channel_connections), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + eventarc.ListChannelConnectionsResponse() + ) await client.list_channel_connections(request) # Establish that the underlying gRPC stub method was called. @@ -6022,9 +6508,9 @@ async def test_list_channel_connections_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_list_channel_connections_flattened(): @@ -6034,14 +6520,14 @@ def test_list_channel_connections_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_channel_connections), - '__call__') as call: + type(client.transport.list_channel_connections), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = eventarc.ListChannelConnectionsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_channel_connections( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -6049,7 +6535,7 @@ def test_list_channel_connections_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -6063,9 +6549,10 @@ def test_list_channel_connections_flattened_error(): with pytest.raises(ValueError): client.list_channel_connections( eventarc.ListChannelConnectionsRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test_list_channel_connections_flattened_async(): client = EventarcAsyncClient( @@ -6074,16 +6561,18 @@ async def test_list_channel_connections_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_channel_connections), - '__call__') as call: + type(client.transport.list_channel_connections), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = eventarc.ListChannelConnectionsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelConnectionsResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + eventarc.ListChannelConnectionsResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_channel_connections( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -6091,9 +6580,10 @@ async def test_list_channel_connections_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test_list_channel_connections_flattened_error_async(): client = EventarcAsyncClient( @@ -6105,7 +6595,7 @@ async def test_list_channel_connections_flattened_error_async(): with pytest.raises(ValueError): await client.list_channel_connections( eventarc.ListChannelConnectionsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -6117,8 +6607,8 @@ def test_list_channel_connections_pager(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_channel_connections), - '__call__') as call: + type(client.transport.list_channel_connections), "__call__" + ) as call: # Set the response to a series of pages. 
call.side_effect = ( eventarc.ListChannelConnectionsResponse( @@ -6127,17 +6617,17 @@ def test_list_channel_connections_pager(transport_name: str = "grpc"): channel_connection.ChannelConnection(), channel_connection.ChannelConnection(), ], - next_page_token='abc', + next_page_token="abc", ), eventarc.ListChannelConnectionsResponse( channel_connections=[], - next_page_token='def', + next_page_token="def", ), eventarc.ListChannelConnectionsResponse( channel_connections=[ channel_connection.ChannelConnection(), ], - next_page_token='ghi', + next_page_token="ghi", ), eventarc.ListChannelConnectionsResponse( channel_connections=[ @@ -6152,11 +6642,11 @@ def test_list_channel_connections_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_channel_connections( + request={}, retry=retry, timeout=timeout ) - pager = client.list_channel_connections(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata assert pager._retry == retry @@ -6164,8 +6654,9 @@ def test_list_channel_connections_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, channel_connection.ChannelConnection) - for i in results) + assert all(isinstance(i, channel_connection.ChannelConnection) for i in results) + + def test_list_channel_connections_pages(transport_name: str = "grpc"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6174,8 +6665,8 @@ def test_list_channel_connections_pages(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_channel_connections), - '__call__') as call: + type(client.transport.list_channel_connections), "__call__" + ) as call: # Set the response to a series of pages. call.side_effect = ( eventarc.ListChannelConnectionsResponse( @@ -6184,17 +6675,17 @@ def test_list_channel_connections_pages(transport_name: str = "grpc"): channel_connection.ChannelConnection(), channel_connection.ChannelConnection(), ], - next_page_token='abc', + next_page_token="abc", ), eventarc.ListChannelConnectionsResponse( channel_connections=[], - next_page_token='def', + next_page_token="def", ), eventarc.ListChannelConnectionsResponse( channel_connections=[ channel_connection.ChannelConnection(), ], - next_page_token='ghi', + next_page_token="ghi", ), eventarc.ListChannelConnectionsResponse( channel_connections=[ @@ -6205,9 +6696,10 @@ def test_list_channel_connections_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.list_channel_connections(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_list_channel_connections_async_pager(): client = EventarcAsyncClient( @@ -6216,8 +6708,10 @@ async def test_list_channel_connections_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_channel_connections), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_channel_connections), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. 
call.side_effect = ( eventarc.ListChannelConnectionsResponse( @@ -6226,17 +6720,17 @@ async def test_list_channel_connections_async_pager(): channel_connection.ChannelConnection(), channel_connection.ChannelConnection(), ], - next_page_token='abc', + next_page_token="abc", ), eventarc.ListChannelConnectionsResponse( channel_connections=[], - next_page_token='def', + next_page_token="def", ), eventarc.ListChannelConnectionsResponse( channel_connections=[ channel_connection.ChannelConnection(), ], - next_page_token='ghi', + next_page_token="ghi", ), eventarc.ListChannelConnectionsResponse( channel_connections=[ @@ -6246,15 +6740,18 @@ async def test_list_channel_connections_async_pager(): ), RuntimeError, ) - async_pager = await client.list_channel_connections(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.list_channel_connections( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, channel_connection.ChannelConnection) - for i in responses) + assert all( + isinstance(i, channel_connection.ChannelConnection) for i in responses + ) @pytest.mark.asyncio @@ -6265,8 +6762,10 @@ async def test_list_channel_connections_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_channel_connections), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_channel_connections), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. 
call.side_effect = ( eventarc.ListChannelConnectionsResponse( @@ -6275,17 +6774,17 @@ async def test_list_channel_connections_async_pages(): channel_connection.ChannelConnection(), channel_connection.ChannelConnection(), ], - next_page_token='abc', + next_page_token="abc", ), eventarc.ListChannelConnectionsResponse( channel_connections=[], - next_page_token='def', + next_page_token="def", ), eventarc.ListChannelConnectionsResponse( channel_connections=[ channel_connection.ChannelConnection(), ], - next_page_token='ghi', + next_page_token="ghi", ), eventarc.ListChannelConnectionsResponse( channel_connections=[ @@ -6298,18 +6797,22 @@ async def test_list_channel_connections_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.list_channel_connections(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - eventarc.CreateChannelConnectionRequest, - dict, -]) -def test_create_channel_connection(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + eventarc.CreateChannelConnectionRequest, + dict, + ], +) +def test_create_channel_connection(request_type, transport: str = "grpc"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6321,10 +6824,10 @@ def test_create_channel_connection(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.create_channel_connection), - '__call__') as call: + type(client.transport.create_channel_connection), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.create_channel_connection(request) # Establish that the underlying gRPC stub method was called. @@ -6342,30 +6845,33 @@ def test_create_channel_connection_non_empty_request_with_auto_populated_field() # automatically populated, according to AIP-4235, with non-empty requests. client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = eventarc.CreateChannelConnectionRequest( - parent='parent_value', - channel_connection_id='channel_connection_id_value', + parent="parent_value", + channel_connection_id="channel_connection_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_channel_connection), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.create_channel_connection), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.create_channel_connection(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.CreateChannelConnectionRequest( - parent='parent_value', - channel_connection_id='channel_connection_id_value', + parent="parent_value", + channel_connection_id="channel_connection_id_value", ) + def test_create_channel_connection_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -6380,12 +6886,19 @@ def test_create_channel_connection_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_channel_connection in client._transport._wrapped_methods + assert ( + client._transport.create_channel_connection + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_channel_connection] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.create_channel_connection + ] = mock_rpc request = {} client.create_channel_connection(request) @@ -6403,8 +6916,11 @@ def test_create_channel_connection_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_channel_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_create_channel_connection_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -6418,12 +6934,17 @@ async def test_create_channel_connection_async_use_cached_wrapped_rpc(transport: wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.create_channel_connection in client._client._transport._wrapped_methods + assert ( + client._client._transport.create_channel_connection + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_channel_connection] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.create_channel_connection + ] = mock_rpc request = {} await client.create_channel_connection(request) @@ -6442,8 +6963,12 @@ async def test_create_channel_connection_async_use_cached_wrapped_rpc(transport: assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_channel_connection_async(transport: str = 'grpc_asyncio', request_type=eventarc.CreateChannelConnectionRequest): +async def test_create_channel_connection_async( + transport: str = "grpc_asyncio", + request_type=eventarc.CreateChannelConnectionRequest, 
+): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -6455,11 +6980,11 @@ async def test_create_channel_connection_async(transport: str = 'grpc_asyncio', # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_channel_connection), - '__call__') as call: + type(client.transport.create_channel_connection), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.create_channel_connection(request) @@ -6477,6 +7002,7 @@ async def test_create_channel_connection_async(transport: str = 'grpc_asyncio', async def test_create_channel_connection_async_from_dict(): await test_create_channel_connection_async(request_type=dict) + def test_create_channel_connection_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6486,13 +7012,13 @@ def test_create_channel_connection_field_headers(): # a field header. Set these to a non-empty value. request = eventarc.CreateChannelConnectionRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_channel_connection), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + type(client.transport.create_channel_connection), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.create_channel_connection(request) # Establish that the underlying gRPC stub method was called. @@ -6503,9 +7029,9 @@ def test_create_channel_connection_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -6518,13 +7044,15 @@ async def test_create_channel_connection_field_headers_async(): # a field header. Set these to a non-empty value. request = eventarc.CreateChannelConnectionRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_channel_connection), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + type(client.transport.create_channel_connection), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.create_channel_connection(request) # Establish that the underlying gRPC stub method was called. @@ -6535,9 +7063,9 @@ async def test_create_channel_connection_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_create_channel_connection_flattened(): @@ -6547,16 +7075,18 @@ def test_create_channel_connection_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_channel_connection), - '__call__') as call: + type(client.transport.create_channel_connection), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_channel_connection( - parent='parent_value', - channel_connection=gce_channel_connection.ChannelConnection(name='name_value'), - channel_connection_id='channel_connection_id_value', + parent="parent_value", + channel_connection=gce_channel_connection.ChannelConnection( + name="name_value" + ), + channel_connection_id="channel_connection_id_value", ) # Establish that the underlying call was made with the expected @@ -6564,13 +7094,13 @@ def test_create_channel_connection_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].channel_connection - mock_val = gce_channel_connection.ChannelConnection(name='name_value') + mock_val = gce_channel_connection.ChannelConnection(name="name_value") assert arg == mock_val arg = args[0].channel_connection_id - mock_val = 'channel_connection_id_value' + mock_val = "channel_connection_id_value" assert arg == mock_val @@ -6584,11 +7114,14 @@ def test_create_channel_connection_flattened_error(): with pytest.raises(ValueError): client.create_channel_connection( eventarc.CreateChannelConnectionRequest(), - parent='parent_value', - channel_connection=gce_channel_connection.ChannelConnection(name='name_value'), - channel_connection_id='channel_connection_id_value', + parent="parent_value", + channel_connection=gce_channel_connection.ChannelConnection( + name="name_value" + ), + channel_connection_id="channel_connection_id_value", ) + @pytest.mark.asyncio async def test_create_channel_connection_flattened_async(): client = EventarcAsyncClient( @@ -6597,20 +7130,22 @@ async def test_create_channel_connection_flattened_async(): # Mock the actual call 
within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_channel_connection), - '__call__') as call: + type(client.transport.create_channel_connection), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_channel_connection( - parent='parent_value', - channel_connection=gce_channel_connection.ChannelConnection(name='name_value'), - channel_connection_id='channel_connection_id_value', + parent="parent_value", + channel_connection=gce_channel_connection.ChannelConnection( + name="name_value" + ), + channel_connection_id="channel_connection_id_value", ) # Establish that the underlying call was made with the expected @@ -6618,15 +7153,16 @@ async def test_create_channel_connection_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].channel_connection - mock_val = gce_channel_connection.ChannelConnection(name='name_value') + mock_val = gce_channel_connection.ChannelConnection(name="name_value") assert arg == mock_val arg = args[0].channel_connection_id - mock_val = 'channel_connection_id_value' + mock_val = "channel_connection_id_value" assert arg == mock_val + @pytest.mark.asyncio async def test_create_channel_connection_flattened_error_async(): client = EventarcAsyncClient( @@ -6638,17 +7174,22 @@ async def test_create_channel_connection_flattened_error_async(): with pytest.raises(ValueError): await 
client.create_channel_connection( eventarc.CreateChannelConnectionRequest(), - parent='parent_value', - channel_connection=gce_channel_connection.ChannelConnection(name='name_value'), - channel_connection_id='channel_connection_id_value', + parent="parent_value", + channel_connection=gce_channel_connection.ChannelConnection( + name="name_value" + ), + channel_connection_id="channel_connection_id_value", ) -@pytest.mark.parametrize("request_type", [ - eventarc.DeleteChannelConnectionRequest, - dict, -]) -def test_delete_channel_connection(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + eventarc.DeleteChannelConnectionRequest, + dict, + ], +) +def test_delete_channel_connection(request_type, transport: str = "grpc"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6660,10 +7201,10 @@ def test_delete_channel_connection(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_channel_connection), - '__call__') as call: + type(client.transport.delete_channel_connection), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.delete_channel_connection(request) # Establish that the underlying gRPC stub method was called. @@ -6681,28 +7222,31 @@ def test_delete_channel_connection_non_empty_request_with_auto_populated_field() # automatically populated, according to AIP-4235, with non-empty requests. client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
request = eventarc.DeleteChannelConnectionRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_channel_connection), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.delete_channel_connection), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_channel_connection(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.DeleteChannelConnectionRequest( - name='name_value', + name="name_value", ) + def test_delete_channel_connection_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -6717,12 +7261,19 @@ def test_delete_channel_connection_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_channel_connection in client._transport._wrapped_methods + assert ( + client._transport.delete_channel_connection + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_channel_connection] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.delete_channel_connection + ] = mock_rpc request = {} client.delete_channel_connection(request) @@ -6740,8 +7291,11 @@ def test_delete_channel_connection_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_channel_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_delete_channel_connection_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -6755,12 +7309,17 @@ async def test_delete_channel_connection_async_use_cached_wrapped_rpc(transport: wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.delete_channel_connection in client._client._transport._wrapped_methods + assert ( + client._client._transport.delete_channel_connection + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_channel_connection] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.delete_channel_connection + ] = mock_rpc request = {} await client.delete_channel_connection(request) @@ -6779,8 +7338,12 @@ async def test_delete_channel_connection_async_use_cached_wrapped_rpc(transport: assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_channel_connection_async(transport: str = 'grpc_asyncio', request_type=eventarc.DeleteChannelConnectionRequest): +async def test_delete_channel_connection_async( + transport: str = "grpc_asyncio", + request_type=eventarc.DeleteChannelConnectionRequest, 
+): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -6792,11 +7355,11 @@ async def test_delete_channel_connection_async(transport: str = 'grpc_asyncio', # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_channel_connection), - '__call__') as call: + type(client.transport.delete_channel_connection), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.delete_channel_connection(request) @@ -6814,6 +7377,7 @@ async def test_delete_channel_connection_async(transport: str = 'grpc_asyncio', async def test_delete_channel_connection_async_from_dict(): await test_delete_channel_connection_async(request_type=dict) + def test_delete_channel_connection_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6823,13 +7387,13 @@ def test_delete_channel_connection_field_headers(): # a field header. Set these to a non-empty value. request = eventarc.DeleteChannelConnectionRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_channel_connection), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + type(client.transport.delete_channel_connection), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.delete_channel_connection(request) # Establish that the underlying gRPC stub method was called. @@ -6840,9 +7404,9 @@ def test_delete_channel_connection_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -6855,13 +7419,15 @@ async def test_delete_channel_connection_field_headers_async(): # a field header. Set these to a non-empty value. request = eventarc.DeleteChannelConnectionRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_channel_connection), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + type(client.transport.delete_channel_connection), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.delete_channel_connection(request) # Establish that the underlying gRPC stub method was called. @@ -6872,9 +7438,9 @@ async def test_delete_channel_connection_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_delete_channel_connection_flattened(): @@ -6884,14 +7450,14 @@ def test_delete_channel_connection_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_channel_connection), - '__call__') as call: + type(client.transport.delete_channel_connection), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.delete_channel_connection( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -6899,7 +7465,7 @@ def test_delete_channel_connection_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -6913,9 +7479,10 @@ def test_delete_channel_connection_flattened_error(): with pytest.raises(ValueError): client.delete_channel_connection( eventarc.DeleteChannelConnectionRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_delete_channel_connection_flattened_async(): client = EventarcAsyncClient( @@ -6924,18 +7491,18 @@ async def test_delete_channel_connection_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_channel_connection), - '__call__') as call: + type(client.transport.delete_channel_connection), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.delete_channel_connection( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -6943,9 +7510,10 @@ async def test_delete_channel_connection_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_delete_channel_connection_flattened_error_async(): client = EventarcAsyncClient( @@ -6957,15 +7525,18 @@ async def test_delete_channel_connection_flattened_error_async(): with pytest.raises(ValueError): await client.delete_channel_connection( eventarc.DeleteChannelConnectionRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - eventarc.GetGoogleChannelConfigRequest, - dict, -]) -def test_get_google_channel_config(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + eventarc.GetGoogleChannelConfigRequest, + dict, + ], +) +def test_get_google_channel_config(request_type, transport: str = "grpc"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6977,12 +7548,12 @@ def test_get_google_channel_config(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_google_channel_config), - '__call__') as call: + type(client.transport.get_google_channel_config), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = google_channel_config.GoogleChannelConfig( - name='name_value', - crypto_key_name='crypto_key_name_value', + name="name_value", + crypto_key_name="crypto_key_name_value", ) response = client.get_google_channel_config(request) @@ -6994,8 +7565,8 @@ def test_get_google_channel_config(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, google_channel_config.GoogleChannelConfig) - assert response.name == 'name_value' - assert response.crypto_key_name == 'crypto_key_name_value' + assert response.name == "name_value" + assert response.crypto_key_name == "crypto_key_name_value" def test_get_google_channel_config_non_empty_request_with_auto_populated_field(): @@ -7003,28 +7574,31 @@ def test_get_google_channel_config_non_empty_request_with_auto_populated_field() # automatically populated, according to AIP-4235, with non-empty requests. client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = eventarc.GetGoogleChannelConfigRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_google_channel_config), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.get_google_channel_config), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.get_google_channel_config(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.GetGoogleChannelConfigRequest( - name='name_value', + name="name_value", ) + def test_get_google_channel_config_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7039,12 +7613,19 @@ def test_get_google_channel_config_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_google_channel_config in client._transport._wrapped_methods + assert ( + client._transport.get_google_channel_config + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_google_channel_config] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_google_channel_config + ] = mock_rpc request = {} client.get_google_channel_config(request) @@ -7057,8 +7638,11 @@ def test_get_google_channel_config_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_google_channel_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_get_google_channel_config_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -7072,12 +7656,17 @@ async def test_get_google_channel_config_async_use_cached_wrapped_rpc(transport: wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_google_channel_config in client._client._transport._wrapped_methods + assert ( + client._client._transport.get_google_channel_config + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_google_channel_config] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.get_google_channel_config + ] = mock_rpc request = {} await client.get_google_channel_config(request) @@ -7091,8 +7680,11 @@ async def test_get_google_channel_config_async_use_cached_wrapped_rpc(transport: assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_google_channel_config_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetGoogleChannelConfigRequest): +async def test_get_google_channel_config_async( + transport: str = "grpc_asyncio", request_type=eventarc.GetGoogleChannelConfigRequest +): 
client = EventarcAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -7104,13 +7696,15 @@ async def test_get_google_channel_config_async(transport: str = 'grpc_asyncio', # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_google_channel_config), - '__call__') as call: + type(client.transport.get_google_channel_config), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(google_channel_config.GoogleChannelConfig( - name='name_value', - crypto_key_name='crypto_key_name_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + google_channel_config.GoogleChannelConfig( + name="name_value", + crypto_key_name="crypto_key_name_value", + ) + ) response = await client.get_google_channel_config(request) # Establish that the underlying gRPC stub method was called. @@ -7121,14 +7715,15 @@ async def test_get_google_channel_config_async(transport: str = 'grpc_asyncio', # Establish that the response is the type that we expect. assert isinstance(response, google_channel_config.GoogleChannelConfig) - assert response.name == 'name_value' - assert response.crypto_key_name == 'crypto_key_name_value' + assert response.name == "name_value" + assert response.crypto_key_name == "crypto_key_name_value" @pytest.mark.asyncio async def test_get_google_channel_config_async_from_dict(): await test_get_google_channel_config_async(request_type=dict) + def test_get_google_channel_config_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7138,12 +7733,12 @@ def test_get_google_channel_config_field_headers(): # a field header. Set these to a non-empty value. request = eventarc.GetGoogleChannelConfigRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_google_channel_config), - '__call__') as call: + type(client.transport.get_google_channel_config), "__call__" + ) as call: call.return_value = google_channel_config.GoogleChannelConfig() client.get_google_channel_config(request) @@ -7155,9 +7750,9 @@ def test_get_google_channel_config_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -7170,13 +7765,15 @@ async def test_get_google_channel_config_field_headers_async(): # a field header. Set these to a non-empty value. request = eventarc.GetGoogleChannelConfigRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_google_channel_config), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(google_channel_config.GoogleChannelConfig()) + type(client.transport.get_google_channel_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + google_channel_config.GoogleChannelConfig() + ) await client.get_google_channel_config(request) # Establish that the underlying gRPC stub method was called. @@ -7187,9 +7784,9 @@ async def test_get_google_channel_config_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_get_google_channel_config_flattened(): @@ -7199,14 +7796,14 @@ def test_get_google_channel_config_flattened(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_google_channel_config), - '__call__') as call: + type(client.transport.get_google_channel_config), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = google_channel_config.GoogleChannelConfig() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_google_channel_config( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -7214,7 +7811,7 @@ def test_get_google_channel_config_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -7228,9 +7825,10 @@ def test_get_google_channel_config_flattened_error(): with pytest.raises(ValueError): client.get_google_channel_config( eventarc.GetGoogleChannelConfigRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_get_google_channel_config_flattened_async(): client = EventarcAsyncClient( @@ -7239,16 +7837,18 @@ async def test_get_google_channel_config_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_google_channel_config), - '__call__') as call: + type(client.transport.get_google_channel_config), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = google_channel_config.GoogleChannelConfig() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(google_channel_config.GoogleChannelConfig()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + google_channel_config.GoogleChannelConfig() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.get_google_channel_config( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -7256,9 +7856,10 @@ async def test_get_google_channel_config_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_get_google_channel_config_flattened_error_async(): client = EventarcAsyncClient( @@ -7270,15 +7871,18 @@ async def test_get_google_channel_config_flattened_error_async(): with pytest.raises(ValueError): await client.get_google_channel_config( eventarc.GetGoogleChannelConfigRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - eventarc.UpdateGoogleChannelConfigRequest, - dict, -]) -def test_update_google_channel_config(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + eventarc.UpdateGoogleChannelConfigRequest, + dict, + ], +) +def test_update_google_channel_config(request_type, transport: str = "grpc"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7290,12 +7894,12 @@ def test_update_google_channel_config(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_google_channel_config), - '__call__') as call: + type(client.transport.update_google_channel_config), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = gce_google_channel_config.GoogleChannelConfig( - name='name_value', - crypto_key_name='crypto_key_name_value', + name="name_value", + crypto_key_name="crypto_key_name_value", ) response = client.update_google_channel_config(request) @@ -7307,8 +7911,8 @@ def test_update_google_channel_config(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, gce_google_channel_config.GoogleChannelConfig) - assert response.name == 'name_value' - assert response.crypto_key_name == 'crypto_key_name_value' + assert response.name == "name_value" + assert response.crypto_key_name == "crypto_key_name_value" def test_update_google_channel_config_non_empty_request_with_auto_populated_field(): @@ -7316,25 +7920,26 @@ def test_update_google_channel_config_non_empty_request_with_auto_populated_fiel # automatically populated, according to AIP-4235, with non-empty requests. client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = eventarc.UpdateGoogleChannelConfigRequest( - ) + request = eventarc.UpdateGoogleChannelConfigRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_google_channel_config), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.update_google_channel_config), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.update_google_channel_config(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.UpdateGoogleChannelConfigRequest( - ) + assert args[0] == eventarc.UpdateGoogleChannelConfigRequest() + def test_update_google_channel_config_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7350,12 +7955,19 @@ def test_update_google_channel_config_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_google_channel_config in client._transport._wrapped_methods + assert ( + client._transport.update_google_channel_config + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_google_channel_config] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.update_google_channel_config + ] = mock_rpc request = {} client.update_google_channel_config(request) @@ -7368,8 +7980,11 @@ def test_update_google_channel_config_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_google_channel_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_update_google_channel_config_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -7383,12 +7998,17 @@ async def test_update_google_channel_config_async_use_cached_wrapped_rpc(transpo wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.update_google_channel_config in client._client._transport._wrapped_methods + assert ( + client._client._transport.update_google_channel_config + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_google_channel_config] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.update_google_channel_config + ] = mock_rpc request = {} await client.update_google_channel_config(request) @@ -7402,8 +8022,12 @@ async def test_update_google_channel_config_async_use_cached_wrapped_rpc(transpo assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_google_channel_config_async(transport: str = 'grpc_asyncio', request_type=eventarc.UpdateGoogleChannelConfigRequest): +async def test_update_google_channel_config_async( + transport: str = "grpc_asyncio", + 
request_type=eventarc.UpdateGoogleChannelConfigRequest, +): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -7415,13 +8039,15 @@ async def test_update_google_channel_config_async(transport: str = 'grpc_asyncio # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_google_channel_config), - '__call__') as call: + type(client.transport.update_google_channel_config), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gce_google_channel_config.GoogleChannelConfig( - name='name_value', - crypto_key_name='crypto_key_name_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gce_google_channel_config.GoogleChannelConfig( + name="name_value", + crypto_key_name="crypto_key_name_value", + ) + ) response = await client.update_google_channel_config(request) # Establish that the underlying gRPC stub method was called. @@ -7432,14 +8058,15 @@ async def test_update_google_channel_config_async(transport: str = 'grpc_asyncio # Establish that the response is the type that we expect. assert isinstance(response, gce_google_channel_config.GoogleChannelConfig) - assert response.name == 'name_value' - assert response.crypto_key_name == 'crypto_key_name_value' + assert response.name == "name_value" + assert response.crypto_key_name == "crypto_key_name_value" @pytest.mark.asyncio async def test_update_google_channel_config_async_from_dict(): await test_update_google_channel_config_async(request_type=dict) + def test_update_google_channel_config_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7449,12 +8076,12 @@ def test_update_google_channel_config_field_headers(): # a field header. Set these to a non-empty value. 
request = eventarc.UpdateGoogleChannelConfigRequest() - request.google_channel_config.name = 'name_value' + request.google_channel_config.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_google_channel_config), - '__call__') as call: + type(client.transport.update_google_channel_config), "__call__" + ) as call: call.return_value = gce_google_channel_config.GoogleChannelConfig() client.update_google_channel_config(request) @@ -7466,9 +8093,9 @@ def test_update_google_channel_config_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'google_channel_config.name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "google_channel_config.name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -7481,13 +8108,15 @@ async def test_update_google_channel_config_field_headers_async(): # a field header. Set these to a non-empty value. request = eventarc.UpdateGoogleChannelConfigRequest() - request.google_channel_config.name = 'name_value' + request.google_channel_config.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_google_channel_config), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gce_google_channel_config.GoogleChannelConfig()) + type(client.transport.update_google_channel_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gce_google_channel_config.GoogleChannelConfig() + ) await client.update_google_channel_config(request) # Establish that the underlying gRPC stub method was called. @@ -7498,9 +8127,9 @@ async def test_update_google_channel_config_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'google_channel_config.name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "google_channel_config.name=name_value", + ) in kw["metadata"] def test_update_google_channel_config_flattened(): @@ -7510,15 +8139,17 @@ def test_update_google_channel_config_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_google_channel_config), - '__call__') as call: + type(client.transport.update_google_channel_config), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = gce_google_channel_config.GoogleChannelConfig() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_google_channel_config( - google_channel_config=gce_google_channel_config.GoogleChannelConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + google_channel_config=gce_google_channel_config.GoogleChannelConfig( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -7526,10 +8157,10 @@ def test_update_google_channel_config_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].google_channel_config - mock_val = gce_google_channel_config.GoogleChannelConfig(name='name_value') + mock_val = gce_google_channel_config.GoogleChannelConfig(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @@ -7543,10 +8174,13 @@ def test_update_google_channel_config_flattened_error(): with pytest.raises(ValueError): client.update_google_channel_config( eventarc.UpdateGoogleChannelConfigRequest(), - 
google_channel_config=gce_google_channel_config.GoogleChannelConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + google_channel_config=gce_google_channel_config.GoogleChannelConfig( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + @pytest.mark.asyncio async def test_update_google_channel_config_flattened_async(): client = EventarcAsyncClient( @@ -7555,17 +8189,21 @@ async def test_update_google_channel_config_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_google_channel_config), - '__call__') as call: + type(client.transport.update_google_channel_config), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = gce_google_channel_config.GoogleChannelConfig() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gce_google_channel_config.GoogleChannelConfig()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gce_google_channel_config.GoogleChannelConfig() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.update_google_channel_config( - google_channel_config=gce_google_channel_config.GoogleChannelConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + google_channel_config=gce_google_channel_config.GoogleChannelConfig( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -7573,12 +8211,13 @@ async def test_update_google_channel_config_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].google_channel_config - mock_val = gce_google_channel_config.GoogleChannelConfig(name='name_value') + mock_val = gce_google_channel_config.GoogleChannelConfig(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val + @pytest.mark.asyncio async def test_update_google_channel_config_flattened_error_async(): client = EventarcAsyncClient( @@ -7590,8 +8229,10 @@ async def test_update_google_channel_config_flattened_error_async(): with pytest.raises(ValueError): await client.update_google_channel_config( eventarc.UpdateGoogleChannelConfigRequest(), - google_channel_config=gce_google_channel_config.GoogleChannelConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + google_channel_config=gce_google_channel_config.GoogleChannelConfig( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -7613,7 +8254,9 @@ def test_get_trigger_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.get_trigger] = mock_rpc request = {} @@ -7636,48 +8279,51 @@ def test_get_trigger_rest_required_fields(request_type=eventarc.GetTriggerReques request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_trigger._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_trigger._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_trigger._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_trigger._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = trigger.Trigger() # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -7688,23 +8334,24 @@ def test_get_trigger_rest_required_fields(request_type=eventarc.GetTriggerReques return_value = trigger.Trigger.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_trigger(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_get_trigger_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.get_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_get_trigger_rest_flattened(): @@ -7714,16 +8361,16 @@ def test_get_trigger_rest_flattened(): ) # Mock the 
http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = trigger.Trigger() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + sample_request = {"name": "projects/sample1/locations/sample2/triggers/sample3"} # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -7733,7 +8380,7 @@ def test_get_trigger_rest_flattened(): # Convert return value to protobuf type return_value = trigger.Trigger.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -7743,10 +8390,13 @@ def test_get_trigger_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/triggers/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/triggers/*}" % client.transport._host, + args[1], + ) -def test_get_trigger_rest_flattened_error(transport: str = 'rest'): +def test_get_trigger_rest_flattened_error(transport: str = "rest"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7757,7 +8407,7 @@ def test_get_trigger_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.get_trigger( eventarc.GetTriggerRequest(), - name='name_value', + name="name_value", ) @@ -7779,7 +8429,9 @@ def test_list_triggers_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.list_triggers] = mock_rpc request = {} @@ -7802,50 +8454,60 @@ def test_list_triggers_rest_required_fields(request_type=eventarc.ListTriggersRe request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_triggers._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_triggers._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_triggers._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_triggers._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = eventarc.ListTriggersResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -7856,23 +8518,34 @@ def test_list_triggers_rest_required_fields(request_type=eventarc.ListTriggersRe return_value = eventarc.ListTriggersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_triggers(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_triggers_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_triggers._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) def test_list_triggers_rest_flattened(): @@ -7882,16 +8555,16 @@ def test_list_triggers_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = eventarc.ListTriggersResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -7901,7 +8574,7 @@ def test_list_triggers_rest_flattened(): # Convert return value to protobuf type return_value = eventarc.ListTriggersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -7911,10 +8584,13 @@ def test_list_triggers_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/triggers" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/triggers" % client.transport._host, + args[1], + ) -def test_list_triggers_rest_flattened_error(transport: str = 'rest'): +def test_list_triggers_rest_flattened_error(transport: str = "rest"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7925,20 +8601,20 @@ def test_list_triggers_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_triggers( eventarc.ListTriggersRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_triggers_rest_pager(transport: str = 'rest'): +def test_list_triggers_rest_pager(transport: str = "rest"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( eventarc.ListTriggersResponse( @@ -7947,17 +8623,17 @@ def test_list_triggers_rest_pager(transport: str = 'rest'): trigger.Trigger(), trigger.Trigger(), ], - next_page_token='abc', + next_page_token="abc", ), eventarc.ListTriggersResponse( triggers=[], - next_page_token='def', + next_page_token="def", ), eventarc.ListTriggersResponse( triggers=[ trigger.Trigger(), ], - next_page_token='ghi', + next_page_token="ghi", ), eventarc.ListTriggersResponse( triggers=[ @@ -7973,21 +8649,20 @@ def test_list_triggers_rest_pager(transport: str = 'rest'): response = tuple(eventarc.ListTriggersResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} pager = client.list_triggers(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, trigger.Trigger) - for i in results) + assert all(isinstance(i, trigger.Trigger) for i in results) pages = list(client.list_triggers(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -8009,7 +8684,9 @@ def test_create_trigger_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = 
mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.create_trigger] = mock_rpc request = {} @@ -8029,7 +8706,9 @@ def test_create_trigger_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_create_trigger_rest_required_fields(request_type=eventarc.CreateTriggerRequest): +def test_create_trigger_rest_required_fields( + request_type=eventarc.CreateTriggerRequest, +): transport_class = transports.EventarcRestTransport request_init = {} @@ -8038,16 +8717,17 @@ def test_create_trigger_rest_required_fields(request_type=eventarc.CreateTrigger request_init["validate_only"] = False request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "triggerId" not in jsonified_request assert "validateOnly" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_trigger._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_trigger._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -8056,53 +8736,60 @@ def test_create_trigger_rest_required_fields(request_type=eventarc.CreateTrigger assert "validateOnly" in jsonified_request assert jsonified_request["validateOnly"] == request_init["validate_only"] - jsonified_request["parent"] = 'parent_value' - jsonified_request["triggerId"] = 'trigger_id_value' + 
jsonified_request["parent"] = "parent_value" + jsonified_request["triggerId"] = "trigger_id_value" jsonified_request["validateOnly"] = True - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_trigger._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_trigger._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("trigger_id", "validate_only", )) + assert not set(unset_fields) - set( + ( + "trigger_id", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" assert "triggerId" in jsonified_request - assert jsonified_request["triggerId"] == 'trigger_id_value' + assert jsonified_request["triggerId"] == "trigger_id_value" assert "validateOnly" in jsonified_request assert jsonified_request["validateOnly"] == True client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -8118,15 +8805,32 @@ def test_create_trigger_rest_required_fields(request_type=eventarc.CreateTrigger str(False).lower(), ), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_create_trigger_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.create_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(("triggerId", "validateOnly", )) & set(("parent", "trigger", "triggerId", "validateOnly", ))) + assert set(unset_fields) == ( + set( + ( + "triggerId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "trigger", + "triggerId", + "validateOnly", + ) + ) + ) def test_create_trigger_rest_flattened(): @@ -8136,18 +8840,18 @@ def test_create_trigger_rest_flattened(): ) # Mock the http request call within the method and fake a 
response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', - trigger=gce_trigger.Trigger(name='name_value'), - trigger_id='trigger_id_value', + parent="parent_value", + trigger=gce_trigger.Trigger(name="name_value"), + trigger_id="trigger_id_value", ) mock_args.update(sample_request) @@ -8155,7 +8859,7 @@ def test_create_trigger_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -8165,10 +8869,13 @@ def test_create_trigger_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/triggers" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/triggers" % client.transport._host, + args[1], + ) -def test_create_trigger_rest_flattened_error(transport: str = 'rest'): +def test_create_trigger_rest_flattened_error(transport: str = "rest"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8179,9 +8886,9 @@ def test_create_trigger_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.create_trigger( eventarc.CreateTriggerRequest(), - parent='parent_value', - trigger=gce_trigger.Trigger(name='name_value'), - trigger_id='trigger_id_value', + parent="parent_value", + trigger=gce_trigger.Trigger(name="name_value"), + trigger_id="trigger_id_value", ) @@ -8203,7 +8910,9 @@ def test_update_trigger_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.update_trigger] = mock_rpc request = {} @@ -8223,22 +8932,25 @@ def test_update_trigger_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_update_trigger_rest_required_fields(request_type=eventarc.UpdateTriggerRequest): +def test_update_trigger_rest_required_fields( + request_type=eventarc.UpdateTriggerRequest, +): transport_class = transports.EventarcRestTransport request_init = {} request_init["validate_only"] = False request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "validateOnly" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_trigger._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_trigger._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -8247,9 +8959,17 @@ def test_update_trigger_rest_required_fields(request_type=eventarc.UpdateTrigger jsonified_request["validateOnly"] = True - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_trigger._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_trigger._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("allow_missing", "update_mask", "validate_only", )) + assert not set(unset_fields) - set( + ( + "allow_missing", + "update_mask", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -8258,34 +8978,34 @@ def test_update_trigger_rest_required_fields(request_type=eventarc.UpdateTrigger client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -8297,15 +9017,26 @@ def test_update_trigger_rest_required_fields(request_type=eventarc.UpdateTrigger str(False).lower(), ), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_update_trigger_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.update_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(("allowMissing", "updateMask", "validateOnly", )) & set(("validateOnly", ))) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "updateMask", + "validateOnly", + ) + ) + & set(("validateOnly",)) + ) def test_update_trigger_rest_flattened(): @@ -8315,17 +9046,19 @@ def test_update_trigger_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} + sample_request = { + "trigger": {"name": "projects/sample1/locations/sample2/triggers/sample3"} + } # get truthy value for each flattened field mock_args = dict( - trigger=gce_trigger.Trigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + trigger=gce_trigger.Trigger(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), allow_missing=True, ) mock_args.update(sample_request) @@ -8334,7 +9067,7 @@ def test_update_trigger_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -8344,10 +9077,14 @@ def test_update_trigger_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{trigger.name=projects/*/locations/*/triggers/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{trigger.name=projects/*/locations/*/triggers/*}" + % client.transport._host, + args[1], + ) -def test_update_trigger_rest_flattened_error(transport: str = 'rest'): +def test_update_trigger_rest_flattened_error(transport: str = "rest"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8358,8 +9095,8 @@ def test_update_trigger_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.update_trigger( eventarc.UpdateTriggerRequest(), - trigger=gce_trigger.Trigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + trigger=gce_trigger.Trigger(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), allow_missing=True, ) @@ -8382,7 +9119,9 @@ def test_delete_trigger_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.delete_trigger] = mock_rpc request = {} @@ -8402,7 +9141,9 @@ def test_delete_trigger_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_delete_trigger_rest_required_fields(request_type=eventarc.DeleteTriggerRequest): +def test_delete_trigger_rest_required_fields( + request_type=eventarc.DeleteTriggerRequest, +): transport_class = transports.EventarcRestTransport request_init = {} @@ -8410,56 +9151,65 @@ def test_delete_trigger_rest_required_fields(request_type=eventarc.DeleteTrigger request_init["validate_only"] = False request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "validateOnly" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_trigger._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_trigger._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present assert "validateOnly" in jsonified_request assert jsonified_request["validateOnly"] == request_init["validate_only"] - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" jsonified_request["validateOnly"] = True - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_trigger._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_trigger._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("allow_missing", "etag", "validate_only", )) + assert not set(unset_fields) - set( + ( + "allow_missing", + "etag", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" assert "validateOnly" in jsonified_request assert jsonified_request["validateOnly"] == True client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -8467,7 +9217,7 @@ def test_delete_trigger_rest_required_fields(request_type=eventarc.DeleteTrigger response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -8479,15 +9229,31 @@ def test_delete_trigger_rest_required_fields(request_type=eventarc.DeleteTrigger str(False).lower(), ), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_delete_trigger_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.delete_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(("allowMissing", "etag", "validateOnly", )) & set(("name", "validateOnly", ))) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "etag", + "validateOnly", + ) + ) + & set( + ( + "name", + "validateOnly", + ) + ) + ) def test_delete_trigger_rest_flattened(): @@ -8497,16 +9263,16 @@ def test_delete_trigger_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + sample_request = {"name": "projects/sample1/locations/sample2/triggers/sample3"} # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", allow_missing=True, ) mock_args.update(sample_request) @@ -8515,7 +9281,7 @@ def test_delete_trigger_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -8525,10 +9291,13 @@ def test_delete_trigger_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/triggers/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/triggers/*}" % client.transport._host, + args[1], + ) -def test_delete_trigger_rest_flattened_error(transport: str = 'rest'): +def test_delete_trigger_rest_flattened_error(transport: str = "rest"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8539,7 +9308,7 @@ def test_delete_trigger_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.delete_trigger( eventarc.DeleteTriggerRequest(), - name='name_value', + name="name_value", allow_missing=True, ) @@ -8562,7 +9331,9 @@ def test_get_channel_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.get_channel] = mock_rpc request = {} @@ -8585,48 +9356,51 @@ def test_get_channel_rest_required_fields(request_type=eventarc.GetChannelReques request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_channel._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_channel._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_channel._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_channel._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = channel.Channel() # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -8637,23 +9411,24 @@ def test_get_channel_rest_required_fields(request_type=eventarc.GetChannelReques return_value = channel.Channel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_channel(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_get_channel_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.get_channel._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_get_channel_rest_flattened(): @@ -8663,16 +9438,16 @@ def test_get_channel_rest_flattened(): ) # Mock the 
http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = channel.Channel() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + sample_request = {"name": "projects/sample1/locations/sample2/channels/sample3"} # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -8682,7 +9457,7 @@ def test_get_channel_rest_flattened(): # Convert return value to protobuf type return_value = channel.Channel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -8692,10 +9467,13 @@ def test_get_channel_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/channels/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/channels/*}" % client.transport._host, + args[1], + ) -def test_get_channel_rest_flattened_error(transport: str = 'rest'): +def test_get_channel_rest_flattened_error(transport: str = "rest"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8706,7 +9484,7 @@ def test_get_channel_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.get_channel( eventarc.GetChannelRequest(), - name='name_value', + name="name_value", ) @@ -8728,7 +9506,9 @@ def test_list_channels_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.list_channels] = mock_rpc request = {} @@ -8751,50 +9531,59 @@ def test_list_channels_rest_required_fields(request_type=eventarc.ListChannelsRe request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_channels._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_channels._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_channels._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_channels._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("order_by", "page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = eventarc.ListChannelsResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -8805,23 +9594,33 @@ def test_list_channels_rest_required_fields(request_type=eventarc.ListChannelsRe return_value = eventarc.ListChannelsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_channels(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_channels_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_channels._get_unset_required_fields({}) - assert set(unset_fields) == (set(("orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) def test_list_channels_rest_flattened(): @@ -8831,16 +9630,16 @@ def test_list_channels_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = eventarc.ListChannelsResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -8850,7 +9649,7 @@ def test_list_channels_rest_flattened(): # Convert return value to protobuf type return_value = eventarc.ListChannelsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -8860,10 +9659,13 @@ def test_list_channels_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/channels" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/channels" % client.transport._host, + args[1], + ) -def test_list_channels_rest_flattened_error(transport: str = 'rest'): +def test_list_channels_rest_flattened_error(transport: str = "rest"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8874,20 +9676,20 @@ def test_list_channels_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_channels( eventarc.ListChannelsRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_channels_rest_pager(transport: str = 'rest'): +def test_list_channels_rest_pager(transport: str = "rest"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( eventarc.ListChannelsResponse( @@ -8896,17 +9698,17 @@ def test_list_channels_rest_pager(transport: str = 'rest'): channel.Channel(), channel.Channel(), ], - next_page_token='abc', + next_page_token="abc", ), eventarc.ListChannelsResponse( channels=[], - next_page_token='def', + next_page_token="def", ), eventarc.ListChannelsResponse( channels=[ channel.Channel(), ], - next_page_token='ghi', + next_page_token="ghi", ), eventarc.ListChannelsResponse( channels=[ @@ -8922,21 +9724,20 @@ def test_list_channels_rest_pager(transport: str = 'rest'): response = tuple(eventarc.ListChannelsResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} pager = client.list_channels(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, channel.Channel) - for i in results) + assert all(isinstance(i, channel.Channel) for i in results) pages = list(client.list_channels(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -8958,7 +9759,9 @@ def test_create_channel_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = 
mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.create_channel_] = mock_rpc request = {} @@ -8978,7 +9781,9 @@ def test_create_channel_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_create_channel_rest_required_fields(request_type=eventarc.CreateChannelRequest): +def test_create_channel_rest_required_fields( + request_type=eventarc.CreateChannelRequest, +): transport_class = transports.EventarcRestTransport request_init = {} @@ -8987,16 +9792,17 @@ def test_create_channel_rest_required_fields(request_type=eventarc.CreateChannel request_init["validate_only"] = False request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "channelId" not in jsonified_request assert "validateOnly" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_channel_._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_channel_._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -9005,53 +9811,60 @@ def test_create_channel_rest_required_fields(request_type=eventarc.CreateChannel assert "validateOnly" in jsonified_request assert jsonified_request["validateOnly"] == request_init["validate_only"] - jsonified_request["parent"] = 'parent_value' - jsonified_request["channelId"] = 'channel_id_value' + 
jsonified_request["parent"] = "parent_value" + jsonified_request["channelId"] = "channel_id_value" jsonified_request["validateOnly"] = True - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_channel_._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_channel_._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("channel_id", "validate_only", )) + assert not set(unset_fields) - set( + ( + "channel_id", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" assert "channelId" in jsonified_request - assert jsonified_request["channelId"] == 'channel_id_value' + assert jsonified_request["channelId"] == "channel_id_value" assert "validateOnly" in jsonified_request assert jsonified_request["validateOnly"] == True client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -9067,15 +9880,32 @@ def test_create_channel_rest_required_fields(request_type=eventarc.CreateChannel str(False).lower(), ), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_create_channel_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.create_channel_._get_unset_required_fields({}) - assert set(unset_fields) == (set(("channelId", "validateOnly", )) & set(("parent", "channel", "channelId", "validateOnly", ))) + assert set(unset_fields) == ( + set( + ( + "channelId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "channel", + "channelId", + "validateOnly", + ) + ) + ) def test_create_channel_rest_flattened(): @@ -9085,18 +9915,18 @@ def test_create_channel_rest_flattened(): ) # Mock the http request call within the method and fake a 
response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', - channel=gce_channel.Channel(name='name_value'), - channel_id='channel_id_value', + parent="parent_value", + channel=gce_channel.Channel(name="name_value"), + channel_id="channel_id_value", ) mock_args.update(sample_request) @@ -9104,7 +9934,7 @@ def test_create_channel_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -9114,10 +9944,13 @@ def test_create_channel_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/channels" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/channels" % client.transport._host, + args[1], + ) -def test_create_channel_rest_flattened_error(transport: str = 'rest'): +def test_create_channel_rest_flattened_error(transport: str = "rest"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9128,9 +9961,9 @@ def test_create_channel_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.create_channel( eventarc.CreateChannelRequest(), - parent='parent_value', - channel=gce_channel.Channel(name='name_value'), - channel_id='channel_id_value', + parent="parent_value", + channel=gce_channel.Channel(name="name_value"), + channel_id="channel_id_value", ) @@ -9152,7 +9985,9 @@ def test_update_channel_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.update_channel] = mock_rpc request = {} @@ -9172,22 +10007,25 @@ def test_update_channel_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_update_channel_rest_required_fields(request_type=eventarc.UpdateChannelRequest): +def test_update_channel_rest_required_fields( + request_type=eventarc.UpdateChannelRequest, +): transport_class = transports.EventarcRestTransport request_init = {} request_init["validate_only"] = False request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "validateOnly" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_channel._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_channel._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -9196,9 +10034,16 @@ def test_update_channel_rest_required_fields(request_type=eventarc.UpdateChannel jsonified_request["validateOnly"] = True - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_channel._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_channel._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask", "validate_only", )) + assert not set(unset_fields) - set( + ( + "update_mask", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -9207,34 +10052,34 @@ def test_update_channel_rest_required_fields(request_type=eventarc.UpdateChannel client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -9246,15 +10091,25 @@ def test_update_channel_rest_required_fields(request_type=eventarc.UpdateChannel str(False).lower(), ), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_update_channel_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.update_channel._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("validateOnly", ))) + assert set(unset_fields) == ( + set( + ( + "updateMask", + "validateOnly", + ) + ) + & set(("validateOnly",)) + ) def test_update_channel_rest_flattened(): @@ -9264,17 +10119,19 @@ def test_update_channel_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'channel': {'name': 'projects/sample1/locations/sample2/channels/sample3'}} + sample_request = { + "channel": {"name": "projects/sample1/locations/sample2/channels/sample3"} + } # get truthy value for each flattened field mock_args = dict( - channel=gce_channel.Channel(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + channel=gce_channel.Channel(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -9282,7 +10139,7 @@ def test_update_channel_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -9292,10 +10149,14 @@ def test_update_channel_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{channel.name=projects/*/locations/*/channels/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{channel.name=projects/*/locations/*/channels/*}" + % client.transport._host, + args[1], + ) -def test_update_channel_rest_flattened_error(transport: str = 'rest'): +def test_update_channel_rest_flattened_error(transport: str = "rest"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9306,8 +10167,8 @@ def test_update_channel_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.update_channel( eventarc.UpdateChannelRequest(), - channel=gce_channel.Channel(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + channel=gce_channel.Channel(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -9329,7 +10190,9 @@ def test_delete_channel_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.delete_channel] = mock_rpc request = {} @@ -9349,7 +10212,9 @@ def test_delete_channel_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_delete_channel_rest_required_fields(request_type=eventarc.DeleteChannelRequest): +def test_delete_channel_rest_required_fields( + request_type=eventarc.DeleteChannelRequest, +): transport_class = transports.EventarcRestTransport request_init = {} @@ -9357,56 +10222,59 @@ def test_delete_channel_rest_required_fields(request_type=eventarc.DeleteChannel request_init["validate_only"] = False request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "validateOnly" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_channel._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_channel._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present assert "validateOnly" in jsonified_request assert jsonified_request["validateOnly"] == request_init["validate_only"] - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" jsonified_request["validateOnly"] = True - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_channel._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_channel._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing 
in. - assert not set(unset_fields) - set(("validate_only", )) + assert not set(unset_fields) - set(("validate_only",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" assert "validateOnly" in jsonified_request assert jsonified_request["validateOnly"] == True client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -9414,7 +10282,7 @@ def test_delete_channel_rest_required_fields(request_type=eventarc.DeleteChannel response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -9426,15 +10294,25 @@ def test_delete_channel_rest_required_fields(request_type=eventarc.DeleteChannel str(False).lower(), ), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_delete_channel_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.delete_channel._get_unset_required_fields({}) - assert set(unset_fields) == (set(("validateOnly", )) & set(("name", "validateOnly", ))) + assert set(unset_fields) == ( + set(("validateOnly",)) + & set( + ( + "name", + "validateOnly", + ) + ) + ) def test_delete_channel_rest_flattened(): @@ -9444,16 +10322,16 @@ def test_delete_channel_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + sample_request = {"name": "projects/sample1/locations/sample2/channels/sample3"} # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -9461,7 +10339,7 @@ def test_delete_channel_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -9471,10 +10349,13 @@ def test_delete_channel_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/channels/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/channels/*}" % client.transport._host, + args[1], + ) -def test_delete_channel_rest_flattened_error(transport: str = 'rest'): +def test_delete_channel_rest_flattened_error(transport: str = "rest"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9485,7 +10366,7 @@ def test_delete_channel_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.delete_channel( eventarc.DeleteChannelRequest(), - name='name_value', + name="name_value", ) @@ -9507,7 +10388,9 @@ def test_get_provider_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.get_provider] = mock_rpc request = {} @@ -9530,48 +10413,51 @@ def test_get_provider_rest_required_fields(request_type=eventarc.GetProviderRequ request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_provider._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_provider._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_provider._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_provider._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = discovery.Provider() # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -9582,23 +10468,24 @@ def test_get_provider_rest_required_fields(request_type=eventarc.GetProviderRequ return_value = discovery.Provider.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_provider(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_get_provider_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.get_provider._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_get_provider_rest_flattened(): @@ -9608,16 +10495,18 @@ def test_get_provider_rest_flattened(): ) # 
Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = discovery.Provider() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/providers/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/providers/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -9627,7 +10516,7 @@ def test_get_provider_rest_flattened(): # Convert return value to protobuf type return_value = discovery.Provider.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -9637,10 +10526,13 @@ def test_get_provider_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/providers/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/providers/*}" % client.transport._host, + args[1], + ) -def test_get_provider_rest_flattened_error(transport: str = 'rest'): +def test_get_provider_rest_flattened_error(transport: str = "rest"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9651,7 +10543,7 @@ def test_get_provider_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.get_provider( eventarc.GetProviderRequest(), - name='name_value', + name="name_value", ) @@ -9673,7 +10565,9 @@ def test_list_providers_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.list_providers] = mock_rpc request = {} @@ -9689,57 +10583,69 @@ def test_list_providers_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_providers_rest_required_fields(request_type=eventarc.ListProvidersRequest): +def test_list_providers_rest_required_fields( + request_type=eventarc.ListProvidersRequest, +): transport_class = transports.EventarcRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_providers._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_providers._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_providers._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_providers._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = eventarc.ListProvidersResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -9750,23 +10656,34 @@ def test_list_providers_rest_required_fields(request_type=eventarc.ListProviders return_value = eventarc.ListProvidersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_providers(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_providers_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_providers._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) def test_list_providers_rest_flattened(): @@ -9776,16 +10693,16 @@ def test_list_providers_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = eventarc.ListProvidersResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -9795,7 +10712,7 @@ def test_list_providers_rest_flattened(): # Convert return value to protobuf type return_value = eventarc.ListProvidersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -9805,10 +10722,13 @@ def test_list_providers_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/providers" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/providers" % client.transport._host, + args[1], + ) -def test_list_providers_rest_flattened_error(transport: str = 'rest'): +def test_list_providers_rest_flattened_error(transport: str = "rest"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9819,20 +10739,20 @@ def test_list_providers_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_providers( eventarc.ListProvidersRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_providers_rest_pager(transport: str = 'rest'): +def test_list_providers_rest_pager(transport: str = "rest"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and 
fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( eventarc.ListProvidersResponse( @@ -9841,17 +10761,17 @@ def test_list_providers_rest_pager(transport: str = 'rest'): discovery.Provider(), discovery.Provider(), ], - next_page_token='abc', + next_page_token="abc", ), eventarc.ListProvidersResponse( providers=[], - next_page_token='def', + next_page_token="def", ), eventarc.ListProvidersResponse( providers=[ discovery.Provider(), ], - next_page_token='ghi', + next_page_token="ghi", ), eventarc.ListProvidersResponse( providers=[ @@ -9867,21 +10787,20 @@ def test_list_providers_rest_pager(transport: str = 'rest'): response = tuple(eventarc.ListProvidersResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} pager = client.list_providers(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, discovery.Provider) - for i in results) + assert all(isinstance(i, discovery.Provider) for i in results) pages = list(client.list_providers(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -9899,12 +10818,19 @@ def 
test_get_channel_connection_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_channel_connection in client._transport._wrapped_methods + assert ( + client._transport.get_channel_connection + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_channel_connection] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_channel_connection] = ( + mock_rpc + ) request = {} client.get_channel_connection(request) @@ -9919,55 +10845,60 @@ def test_get_channel_connection_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_get_channel_connection_rest_required_fields(request_type=eventarc.GetChannelConnectionRequest): +def test_get_channel_connection_rest_required_fields( + request_type=eventarc.GetChannelConnectionRequest, +): transport_class = transports.EventarcRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_channel_connection._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_channel_connection._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values 
are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_channel_connection._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_channel_connection._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = channel_connection.ChannelConnection() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -9978,23 +10909,24 @@ def test_get_channel_connection_rest_required_fields(request_type=eventarc.GetCh return_value = channel_connection.ChannelConnection.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_channel_connection(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_get_channel_connection_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.get_channel_connection._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_get_channel_connection_rest_flattened(): @@ -10004,16 +10936,18 @@ def test_get_channel_connection_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = channel_connection.ChannelConnection() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/channelConnections/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -10023,7 +10957,7 @@ def test_get_channel_connection_rest_flattened(): # Convert return value to protobuf type return_value = channel_connection.ChannelConnection.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -10033,10 +10967,14 @@ def test_get_channel_connection_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/channelConnections/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/channelConnections/*}" + % client.transport._host, + args[1], + ) -def test_get_channel_connection_rest_flattened_error(transport: str = 'rest'): +def test_get_channel_connection_rest_flattened_error(transport: str = "rest"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10047,7 +10985,7 @@ def test_get_channel_connection_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.get_channel_connection( eventarc.GetChannelConnectionRequest(), - name='name_value', + name="name_value", ) @@ -10065,12 +11003,19 @@ def test_list_channel_connections_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_channel_connections in client._transport._wrapped_methods + assert ( + client._transport.list_channel_connections + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_channel_connections] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_channel_connections + ] = mock_rpc request = {} client.list_channel_connections(request) @@ -10085,57 +11030,67 @@ def test_list_channel_connections_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_channel_connections_rest_required_fields(request_type=eventarc.ListChannelConnectionsRequest): +def test_list_channel_connections_rest_required_fields( + request_type=eventarc.ListChannelConnectionsRequest, +): transport_class = transports.EventarcRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_channel_connections._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_channel_connections._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_channel_connections._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_channel_connections._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = eventarc.ListChannelConnectionsResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -10146,23 +11101,32 @@ def test_list_channel_connections_rest_required_fields(request_type=eventarc.Lis return_value = eventarc.ListChannelConnectionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_channel_connections(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_channel_connections_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_channel_connections._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) def test_list_channel_connections_rest_flattened(): @@ -10172,16 +11136,16 @@ def test_list_channel_connections_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = eventarc.ListChannelConnectionsResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -10191,7 +11155,7 @@ def test_list_channel_connections_rest_flattened(): # Convert return value to protobuf type return_value = eventarc.ListChannelConnectionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -10201,10 +11165,14 @@ def test_list_channel_connections_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/channelConnections" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/channelConnections" + % client.transport._host, + args[1], + ) -def test_list_channel_connections_rest_flattened_error(transport: str = 'rest'): +def test_list_channel_connections_rest_flattened_error(transport: str = "rest"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10215,20 +11183,20 @@ def test_list_channel_connections_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_channel_connections( eventarc.ListChannelConnectionsRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_channel_connections_rest_pager(transport: str = 'rest'): +def test_list_channel_connections_rest_pager(transport: str = "rest"): client = 
EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( eventarc.ListChannelConnectionsResponse( @@ -10237,17 +11205,17 @@ def test_list_channel_connections_rest_pager(transport: str = 'rest'): channel_connection.ChannelConnection(), channel_connection.ChannelConnection(), ], - next_page_token='abc', + next_page_token="abc", ), eventarc.ListChannelConnectionsResponse( channel_connections=[], - next_page_token='def', + next_page_token="def", ), eventarc.ListChannelConnectionsResponse( channel_connections=[ channel_connection.ChannelConnection(), ], - next_page_token='ghi', + next_page_token="ghi", ), eventarc.ListChannelConnectionsResponse( channel_connections=[ @@ -10260,24 +11228,25 @@ def test_list_channel_connections_rest_pager(transport: str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(eventarc.ListChannelConnectionsResponse.to_json(x) for x in response) + response = tuple( + eventarc.ListChannelConnectionsResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} pager = client.list_channel_connections(request=sample_request) results = list(pager) 
assert len(results) == 6 - assert all(isinstance(i, channel_connection.ChannelConnection) - for i in results) + assert all(isinstance(i, channel_connection.ChannelConnection) for i in results) pages = list(client.list_channel_connections(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -10295,12 +11264,19 @@ def test_create_channel_connection_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_channel_connection in client._transport._wrapped_methods + assert ( + client._transport.create_channel_connection + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_channel_connection] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.create_channel_connection + ] = mock_rpc request = {} client.create_channel_connection(request) @@ -10319,7 +11295,9 @@ def test_create_channel_connection_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_create_channel_connection_rest_required_fields(request_type=eventarc.CreateChannelConnectionRequest): +def test_create_channel_connection_rest_required_fields( + request_type=eventarc.CreateChannelConnectionRequest, +): transport_class = transports.EventarcRestTransport request_init = {} @@ -10327,65 +11305,71 @@ def test_create_channel_connection_rest_required_fields(request_type=eventarc.Cr request_init["channel_connection_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "channelConnectionId" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_channel_connection._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_channel_connection._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present assert "channelConnectionId" in jsonified_request - assert jsonified_request["channelConnectionId"] == request_init["channel_connection_id"] + assert ( + jsonified_request["channelConnectionId"] + == request_init["channel_connection_id"] + ) - jsonified_request["parent"] = 'parent_value' - jsonified_request["channelConnectionId"] = 'channel_connection_id_value' + jsonified_request["parent"] = "parent_value" + jsonified_request["channelConnectionId"] = 
"channel_connection_id_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_channel_connection._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_channel_connection._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("channel_connection_id", )) + assert not set(unset_fields) - set(("channel_connection_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" assert "channelConnectionId" in jsonified_request - assert jsonified_request["channelConnectionId"] == 'channel_connection_id_value' + assert jsonified_request["channelConnectionId"] == "channel_connection_id_value" client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -10397,15 +11381,26 @@ def test_create_channel_connection_rest_required_fields(request_type=eventarc.Cr "", ), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_create_channel_connection_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.create_channel_connection._get_unset_required_fields({}) - assert set(unset_fields) == (set(("channelConnectionId", )) & set(("parent", "channelConnection", "channelConnectionId", ))) + assert set(unset_fields) == ( + set(("channelConnectionId",)) + & set( + ( + "parent", + "channelConnection", + "channelConnectionId", + ) + ) + ) def test_create_channel_connection_rest_flattened(): @@ -10415,18 +11410,20 @@ def test_create_channel_connection_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', - channel_connection=gce_channel_connection.ChannelConnection(name='name_value'), - channel_connection_id='channel_connection_id_value', + parent="parent_value", + channel_connection=gce_channel_connection.ChannelConnection( + name="name_value" + ), + channel_connection_id="channel_connection_id_value", ) mock_args.update(sample_request) @@ -10434,7 +11431,7 @@ def test_create_channel_connection_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -10444,10 +11441,14 @@ def test_create_channel_connection_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/channelConnections" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/channelConnections" + % client.transport._host, + args[1], + ) -def test_create_channel_connection_rest_flattened_error(transport: str = 'rest'): +def test_create_channel_connection_rest_flattened_error(transport: str = "rest"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10458,9 +11459,11 @@ def test_create_channel_connection_rest_flattened_error(transport: str = 'rest') with pytest.raises(ValueError): client.create_channel_connection( eventarc.CreateChannelConnectionRequest(), - parent='parent_value', - channel_connection=gce_channel_connection.ChannelConnection(name='name_value'), - channel_connection_id='channel_connection_id_value', + parent="parent_value", + channel_connection=gce_channel_connection.ChannelConnection( + name="name_value" + ), + channel_connection_id="channel_connection_id_value", ) @@ -10478,12 +11481,19 @@ def test_delete_channel_connection_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_channel_connection in client._transport._wrapped_methods + assert ( + client._transport.delete_channel_connection + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_channel_connection] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.delete_channel_connection + ] = mock_rpc request = {} client.delete_channel_connection(request) @@ -10502,55 +11512,60 @@ def test_delete_channel_connection_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_delete_channel_connection_rest_required_fields(request_type=eventarc.DeleteChannelConnectionRequest): +def test_delete_channel_connection_rest_required_fields( + request_type=eventarc.DeleteChannelConnectionRequest, +): transport_class = transports.EventarcRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_channel_connection._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_channel_connection._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_channel_connection._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_channel_connection._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == 
"name_value" client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -10558,23 +11573,24 @@ def test_delete_channel_connection_rest_required_fields(request_type=eventarc.De response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_channel_connection(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_delete_channel_connection_rest_unset_required_fields(): - transport = 
transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.delete_channel_connection._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_delete_channel_connection_rest_flattened(): @@ -10584,16 +11600,18 @@ def test_delete_channel_connection_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/channelConnections/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -10601,7 +11619,7 @@ def test_delete_channel_connection_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -10611,10 +11629,14 @@ def test_delete_channel_connection_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/channelConnections/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/channelConnections/*}" + % client.transport._host, + args[1], + ) -def test_delete_channel_connection_rest_flattened_error(transport: str = 'rest'): +def test_delete_channel_connection_rest_flattened_error(transport: str = "rest"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10625,7 +11647,7 @@ def test_delete_channel_connection_rest_flattened_error(transport: str = 'rest') with pytest.raises(ValueError): client.delete_channel_connection( eventarc.DeleteChannelConnectionRequest(), - name='name_value', + name="name_value", ) @@ -10643,12 +11665,19 @@ def test_get_google_channel_config_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_google_channel_config in client._transport._wrapped_methods + assert ( + client._transport.get_google_channel_config + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_google_channel_config] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_google_channel_config + ] = mock_rpc request = {} client.get_google_channel_config(request) @@ -10663,55 +11692,60 @@ def test_get_google_channel_config_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_get_google_channel_config_rest_required_fields(request_type=eventarc.GetGoogleChannelConfigRequest): +def test_get_google_channel_config_rest_required_fields( + request_type=eventarc.GetGoogleChannelConfigRequest, +): transport_class = transports.EventarcRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_google_channel_config._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_google_channel_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_google_channel_config._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_google_channel_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == 
"name_value" client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = google_channel_config.GoogleChannelConfig() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -10722,23 +11756,24 @@ def test_get_google_channel_config_rest_required_fields(request_type=eventarc.Ge return_value = google_channel_config.GoogleChannelConfig.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_google_channel_config(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_get_google_channel_config_rest_unset_required_fields(): - transport = 
transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.get_google_channel_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_get_google_channel_config_rest_flattened(): @@ -10748,16 +11783,18 @@ def test_get_google_channel_config_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = google_channel_config.GoogleChannelConfig() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/googleChannelConfig'} + sample_request = { + "name": "projects/sample1/locations/sample2/googleChannelConfig" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -10767,7 +11804,7 @@ def test_get_google_channel_config_rest_flattened(): # Convert return value to protobuf type return_value = google_channel_config.GoogleChannelConfig.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -10777,10 +11814,14 @@ def test_get_google_channel_config_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/googleChannelConfig}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/googleChannelConfig}" + % client.transport._host, + args[1], + ) -def test_get_google_channel_config_rest_flattened_error(transport: str = 'rest'): +def test_get_google_channel_config_rest_flattened_error(transport: str = "rest"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10791,7 +11832,7 @@ def test_get_google_channel_config_rest_flattened_error(transport: str = 'rest') with pytest.raises(ValueError): client.get_google_channel_config( eventarc.GetGoogleChannelConfigRequest(), - name='name_value', + name="name_value", ) @@ -10809,12 +11850,19 @@ def test_update_google_channel_config_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_google_channel_config in client._transport._wrapped_methods + assert ( + client._transport.update_google_channel_config + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_google_channel_config] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.update_google_channel_config + ] = mock_rpc request = {} client.update_google_channel_config(request) @@ -10829,80 +11877,88 @@ def test_update_google_channel_config_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_update_google_channel_config_rest_required_fields(request_type=eventarc.UpdateGoogleChannelConfigRequest): +def test_update_google_channel_config_rest_required_fields( + request_type=eventarc.UpdateGoogleChannelConfigRequest, +): transport_class = transports.EventarcRestTransport request_init = {} request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_google_channel_config._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_google_channel_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_google_channel_config._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_google_channel_config._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask", )) + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = gce_google_channel_config.GoogleChannelConfig() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = gce_google_channel_config.GoogleChannelConfig.pb(return_value) + return_value = gce_google_channel_config.GoogleChannelConfig.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_google_channel_config(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_update_google_channel_config_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.update_google_channel_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("googleChannelConfig", ))) + assert set(unset_fields) == (set(("updateMask",)) & set(("googleChannelConfig",))) def test_update_google_channel_config_rest_flattened(): @@ -10912,17 +11968,23 @@ def test_update_google_channel_config_rest_flattened(): ) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = gce_google_channel_config.GoogleChannelConfig() # get arguments that satisfy an http rule for this method - sample_request = {'google_channel_config': {'name': 'projects/sample1/locations/sample2/googleChannelConfig'}} + sample_request = { + "google_channel_config": { + "name": "projects/sample1/locations/sample2/googleChannelConfig" + } + } # get truthy value for each flattened field mock_args = dict( - google_channel_config=gce_google_channel_config.GoogleChannelConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + google_channel_config=gce_google_channel_config.GoogleChannelConfig( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -10932,7 +11994,7 @@ def test_update_google_channel_config_rest_flattened(): # Convert return value to protobuf type return_value = gce_google_channel_config.GoogleChannelConfig.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -10942,10 +12004,14 @@ def test_update_google_channel_config_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{google_channel_config.name=projects/*/locations/*/googleChannelConfig}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{google_channel_config.name=projects/*/locations/*/googleChannelConfig}" + % client.transport._host, + args[1], + ) -def test_update_google_channel_config_rest_flattened_error(transport: str = 'rest'): +def test_update_google_channel_config_rest_flattened_error(transport: str = "rest"): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10956,8 +12022,10 @@ def test_update_google_channel_config_rest_flattened_error(transport: str = 'res with pytest.raises(ValueError): client.update_google_channel_config( eventarc.UpdateGoogleChannelConfigRequest(), - google_channel_config=gce_google_channel_config.GoogleChannelConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + google_channel_config=gce_google_channel_config.GoogleChannelConfig( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -10999,8 +12067,7 @@ def test_credentials_transport_error(): options.api_key = "api_key" with pytest.raises(ValueError): client = EventarcClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() + client_options=options, credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. @@ -11022,6 +12089,7 @@ def test_transport_instance(): client = EventarcClient(transport=transport) assert client.transport is transport + def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.EventarcGrpcTransport( @@ -11036,18 +12104,23 @@ def test_transport_get_channel(): channel = transport.grpc_channel assert channel -@pytest.mark.parametrize("transport_class", [ - transports.EventarcGrpcTransport, - transports.EventarcGrpcAsyncIOTransport, - transports.EventarcRestTransport, -]) + +@pytest.mark.parametrize( + "transport_class", + [ + transports.EventarcGrpcTransport, + transports.EventarcGrpcAsyncIOTransport, + transports.EventarcRestTransport, + ], +) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() + def test_transport_kind_grpc(): transport = EventarcClient.get_transport_class("grpc")( credentials=ga_credentials.AnonymousCredentials() @@ -11057,8 +12130,7 @@ def test_transport_kind_grpc(): def test_initialize_client_w_grpc(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" ) assert client is not None @@ -11072,9 +12144,7 @@ def test_get_trigger_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_trigger), - '__call__') as call: + with mock.patch.object(type(client.transport.get_trigger), "__call__") as call: call.return_value = trigger.Trigger() client.get_trigger(request=None) @@ -11095,9 +12165,7 @@ def test_list_triggers_empty_call_grpc(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_triggers), - '__call__') as call: + with mock.patch.object(type(client.transport.list_triggers), "__call__") as call: call.return_value = eventarc.ListTriggersResponse() client.list_triggers(request=None) @@ -11118,10 +12186,8 @@ def test_create_trigger_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_trigger), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.create_trigger), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.create_trigger(request=None) # Establish that the underlying stub method was called. @@ -11141,10 +12207,8 @@ def test_update_trigger_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_trigger), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.update_trigger), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.update_trigger(request=None) # Establish that the underlying stub method was called. @@ -11164,10 +12228,8 @@ def test_delete_trigger_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_trigger), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.delete_trigger), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.delete_trigger(request=None) # Establish that the underlying stub method was called. @@ -11187,9 +12249,7 @@ def test_get_channel_empty_call_grpc(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.get_channel), - '__call__') as call: + with mock.patch.object(type(client.transport.get_channel), "__call__") as call: call.return_value = channel.Channel() client.get_channel(request=None) @@ -11210,9 +12270,7 @@ def test_list_channels_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_channels), - '__call__') as call: + with mock.patch.object(type(client.transport.list_channels), "__call__") as call: call.return_value = eventarc.ListChannelsResponse() client.list_channels(request=None) @@ -11233,10 +12291,8 @@ def test_create_channel_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_channel_), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.create_channel_), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.create_channel(request=None) # Establish that the underlying stub method was called. @@ -11256,10 +12312,8 @@ def test_update_channel_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_channel), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.update_channel), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.update_channel(request=None) # Establish that the underlying stub method was called. @@ -11279,10 +12333,8 @@ def test_delete_channel_empty_call_grpc(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_channel), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.delete_channel), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.delete_channel(request=None) # Establish that the underlying stub method was called. @@ -11302,9 +12354,7 @@ def test_get_provider_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_provider), - '__call__') as call: + with mock.patch.object(type(client.transport.get_provider), "__call__") as call: call.return_value = discovery.Provider() client.get_provider(request=None) @@ -11325,9 +12375,7 @@ def test_list_providers_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_providers), - '__call__') as call: + with mock.patch.object(type(client.transport.list_providers), "__call__") as call: call.return_value = eventarc.ListProvidersResponse() client.list_providers(request=None) @@ -11349,8 +12397,8 @@ def test_get_channel_connection_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_channel_connection), - '__call__') as call: + type(client.transport.get_channel_connection), "__call__" + ) as call: call.return_value = channel_connection.ChannelConnection() client.get_channel_connection(request=None) @@ -11372,8 +12420,8 @@ def test_list_channel_connections_empty_call_grpc(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.list_channel_connections), - '__call__') as call: + type(client.transport.list_channel_connections), "__call__" + ) as call: call.return_value = eventarc.ListChannelConnectionsResponse() client.list_channel_connections(request=None) @@ -11395,9 +12443,9 @@ def test_create_channel_connection_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.create_channel_connection), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + type(client.transport.create_channel_connection), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.create_channel_connection(request=None) # Establish that the underlying stub method was called. @@ -11418,9 +12466,9 @@ def test_delete_channel_connection_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.delete_channel_connection), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + type(client.transport.delete_channel_connection), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.delete_channel_connection(request=None) # Establish that the underlying stub method was called. @@ -11441,8 +12489,8 @@ def test_get_google_channel_config_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_google_channel_config), - '__call__') as call: + type(client.transport.get_google_channel_config), "__call__" + ) as call: call.return_value = google_channel_config.GoogleChannelConfig() client.get_google_channel_config(request=None) @@ -11464,8 +12512,8 @@ def test_update_google_channel_config_empty_call_grpc(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.update_google_channel_config), - '__call__') as call: + type(client.transport.update_google_channel_config), "__call__" + ) as call: call.return_value = gce_google_channel_config.GoogleChannelConfig() client.update_google_channel_config(request=None) @@ -11486,8 +12534,7 @@ def test_transport_kind_grpc_asyncio(): def test_initialize_client_w_grpc_asyncio(): client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" + credentials=async_anonymous_credentials(), transport="grpc_asyncio" ) assert client is not None @@ -11502,17 +12549,17 @@ async def test_get_trigger_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_trigger), - '__call__') as call: + with mock.patch.object(type(client.transport.get_trigger), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(trigger.Trigger( - name='name_value', - uid='uid_value', - service_account='service_account_value', - channel='channel_value', - etag='etag_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + trigger.Trigger( + name="name_value", + uid="uid_value", + service_account="service_account_value", + channel="channel_value", + etag="etag_value", + ) + ) await client.get_trigger(request=None) # Establish that the underlying stub method was called. @@ -11533,14 +12580,14 @@ async def test_list_triggers_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_triggers), - '__call__') as call: + with mock.patch.object(type(client.transport.list_triggers), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListTriggersResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + eventarc.ListTriggersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) await client.list_triggers(request=None) # Establish that the underlying stub method was called. @@ -11561,12 +12608,10 @@ async def test_create_trigger_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_trigger), - '__call__') as call: + with mock.patch.object(type(client.transport.create_trigger), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.create_trigger(request=None) @@ -11588,12 +12633,10 @@ async def test_update_trigger_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_trigger), - '__call__') as call: + with mock.patch.object(type(client.transport.update_trigger), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.update_trigger(request=None) @@ -11615,12 +12658,10 @@ async def test_delete_trigger_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_trigger), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_trigger), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.delete_trigger(request=None) @@ -11642,18 +12683,18 @@ async def test_get_channel_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_channel), - '__call__') as call: + with mock.patch.object(type(client.transport.get_channel), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel.Channel( - name='name_value', - uid='uid_value', - provider='provider_value', - state=channel.Channel.State.PENDING, - activation_token='activation_token_value', - crypto_key_name='crypto_key_name_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + channel.Channel( + name="name_value", + uid="uid_value", + provider="provider_value", + state=channel.Channel.State.PENDING, + activation_token="activation_token_value", + crypto_key_name="crypto_key_name_value", + ) + ) await client.get_channel(request=None) # Establish that the underlying stub method was called. @@ -11674,14 +12715,14 @@ async def test_list_channels_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_channels), - '__call__') as call: + with mock.patch.object(type(client.transport.list_channels), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + eventarc.ListChannelsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) await client.list_channels(request=None) # Establish that the underlying stub method was called. @@ -11702,12 +12743,10 @@ async def test_create_channel_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_channel_), - '__call__') as call: + with mock.patch.object(type(client.transport.create_channel_), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.create_channel(request=None) @@ -11729,12 +12768,10 @@ async def test_update_channel_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_channel), - '__call__') as call: + with mock.patch.object(type(client.transport.update_channel), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.update_channel(request=None) @@ -11756,12 +12793,10 @@ async def test_delete_channel_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_channel), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_channel), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.delete_channel(request=None) @@ -11783,14 +12818,14 @@ async def test_get_provider_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_provider), - '__call__') as call: + with mock.patch.object(type(client.transport.get_provider), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(discovery.Provider( - name='name_value', - display_name='display_name_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + discovery.Provider( + name="name_value", + display_name="display_name_value", + ) + ) await client.get_provider(request=None) # Establish that the underlying stub method was called. @@ -11811,14 +12846,14 @@ async def test_list_providers_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_providers), - '__call__') as call: + with mock.patch.object(type(client.transport.list_providers), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListProvidersResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + eventarc.ListProvidersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) await client.list_providers(request=None) # Establish that the underlying stub method was called. @@ -11840,15 +12875,17 @@ async def test_get_channel_connection_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.get_channel_connection), - '__call__') as call: + type(client.transport.get_channel_connection), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel_connection.ChannelConnection( - name='name_value', - uid='uid_value', - channel='channel_value', - activation_token='activation_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + channel_connection.ChannelConnection( + name="name_value", + uid="uid_value", + channel="channel_value", + activation_token="activation_token_value", + ) + ) await client.get_channel_connection(request=None) # Establish that the underlying stub method was called. @@ -11870,13 +12907,15 @@ async def test_list_channel_connections_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.list_channel_connections), - '__call__') as call: + type(client.transport.list_channel_connections), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelConnectionsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + eventarc.ListChannelConnectionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) await client.list_channel_connections(request=None) # Establish that the underlying stub method was called. @@ -11898,11 +12937,11 @@ async def test_create_channel_connection_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.create_channel_connection), - '__call__') as call: + type(client.transport.create_channel_connection), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.create_channel_connection(request=None) @@ -11925,11 +12964,11 @@ async def test_delete_channel_connection_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.delete_channel_connection), - '__call__') as call: + type(client.transport.delete_channel_connection), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.delete_channel_connection(request=None) @@ -11952,13 +12991,15 @@ async def test_get_google_channel_config_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_google_channel_config), - '__call__') as call: + type(client.transport.get_google_channel_config), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(google_channel_config.GoogleChannelConfig( - name='name_value', - crypto_key_name='crypto_key_name_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + google_channel_config.GoogleChannelConfig( + name="name_value", + crypto_key_name="crypto_key_name_value", + ) + ) await client.get_google_channel_config(request=None) # Establish that the underlying stub method was called. @@ -11980,13 +13021,15 @@ async def test_update_google_channel_config_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.update_google_channel_config), - '__call__') as call: + type(client.transport.update_google_channel_config), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gce_google_channel_config.GoogleChannelConfig( - name='name_value', - crypto_key_name='crypto_key_name_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gce_google_channel_config.GoogleChannelConfig( + name="name_value", + crypto_key_name="crypto_key_name_value", + ) + ) await client.update_google_channel_config(request=None) # Establish that the underlying stub method was called. @@ -12006,18 +13049,20 @@ def test_transport_kind_rest(): def test_get_trigger_rest_bad_request(request_type=eventarc.GetTriggerRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/triggers/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -12026,29 +13071,31 @@ def test_get_trigger_rest_bad_request(request_type=eventarc.GetTriggerRequest): client.get_trigger(request) -@pytest.mark.parametrize("request_type", [ - eventarc.GetTriggerRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + eventarc.GetTriggerRequest, + dict, + ], +) def test_get_trigger_rest_call_success(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/triggers/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = trigger.Trigger( - name='name_value', - uid='uid_value', - service_account='service_account_value', - channel='channel_value', - etag='etag_value', + name="name_value", + uid="uid_value", + service_account="service_account_value", + channel="channel_value", + etag="etag_value", ) # Wrap the value into a proper Response obj @@ -12058,18 +13105,18 @@ def test_get_trigger_rest_call_success(request_type): # Convert return value to protobuf type return_value = trigger.Trigger.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_trigger(request) # Establish that the response is the type that we expect. assert isinstance(response, trigger.Trigger) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.service_account == 'service_account_value' - assert response.channel == 'channel_value' - assert response.etag == 'etag_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.service_account == "service_account_value" + assert response.channel == "channel_value" + assert response.etag == "etag_value" @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -12077,14 +13124,20 @@ def test_get_trigger_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) + ) client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_get_trigger") as post, \ - 
mock.patch.object(transports.EventarcRestInterceptor, "post_get_trigger_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_get_trigger") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.EventarcRestInterceptor, "post_get_trigger" + ) as post, + mock.patch.object( + transports.EventarcRestInterceptor, "post_get_trigger_with_metadata" + ) as post_with_metadata, + mock.patch.object(transports.EventarcRestInterceptor, "pre_get_trigger") as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -12103,7 +13156,7 @@ def test_get_trigger_rest_interceptors(null_interceptor): req.return_value.content = return_value request = eventarc.GetTriggerRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -12111,7 +13164,13 @@ def test_get_trigger_rest_interceptors(null_interceptor): post.return_value = trigger.Trigger() post_with_metadata.return_value = trigger.Trigger(), metadata - client.get_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_trigger( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -12120,18 +13179,20 @@ def test_get_trigger_rest_interceptors(null_interceptor): def test_list_triggers_rest_bad_request(request_type=eventarc.ListTriggersRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest 
error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -12140,26 +13201,28 @@ def test_list_triggers_rest_bad_request(request_type=eventarc.ListTriggersReques client.list_triggers(request) -@pytest.mark.parametrize("request_type", [ - eventarc.ListTriggersRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + eventarc.ListTriggersRequest, + dict, + ], +) def test_list_triggers_rest_call_success(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = eventarc.ListTriggersResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -12169,15 +13232,15 @@ def test_list_triggers_rest_call_success(request_type): # Convert return value to protobuf type return_value = eventarc.ListTriggersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_triggers(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListTriggersPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -12185,14 +13248,22 @@ def test_list_triggers_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) + ) client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_list_triggers") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_list_triggers_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_list_triggers") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as 
req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.EventarcRestInterceptor, "post_list_triggers" + ) as post, + mock.patch.object( + transports.EventarcRestInterceptor, "post_list_triggers_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.EventarcRestInterceptor, "pre_list_triggers" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -12207,11 +13278,13 @@ def test_list_triggers_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = eventarc.ListTriggersResponse.to_json(eventarc.ListTriggersResponse()) + return_value = eventarc.ListTriggersResponse.to_json( + eventarc.ListTriggersResponse() + ) req.return_value.content = return_value request = eventarc.ListTriggersRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -12219,7 +13292,13 @@ def test_list_triggers_rest_interceptors(null_interceptor): post.return_value = eventarc.ListTriggersResponse() post_with_metadata.return_value = eventarc.ListTriggersResponse(), metadata - client.list_triggers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_triggers( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -12228,18 +13307,20 @@ def test_list_triggers_rest_interceptors(null_interceptor): def test_create_trigger_rest_bad_request(request_type=eventarc.CreateTriggerRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} 
request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -12248,19 +13329,57 @@ def test_create_trigger_rest_bad_request(request_type=eventarc.CreateTriggerRequ client.create_trigger(request) -@pytest.mark.parametrize("request_type", [ - eventarc.CreateTriggerRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + eventarc.CreateTriggerRequest, + dict, + ], +) def test_create_trigger_rest_call_success(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["trigger"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value', 'operator': 'operator_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}, 'cloud_function': 'cloud_function_value', 'gke': {'cluster': 'cluster_value', 'location': 'location_value', 'namespace': 'namespace_value', 'service': 'service_value', 'path': 'path_value'}, 'workflow': 'workflow_value'}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'channel': 'channel_value', 'conditions': {}, 'etag': 
'etag_value'} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["trigger"] = { + "name": "name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "event_filters": [ + { + "attribute": "attribute_value", + "value": "value_value", + "operator": "operator_value", + } + ], + "service_account": "service_account_value", + "destination": { + "cloud_run": { + "service": "service_value", + "path": "path_value", + "region": "region_value", + }, + "cloud_function": "cloud_function_value", + "gke": { + "cluster": "cluster_value", + "location": "location_value", + "namespace": "namespace_value", + "service": "service_value", + "path": "path_value", + }, + "workflow": "workflow_value", + }, + "transport": { + "pubsub": {"topic": "topic_value", "subscription": "subscription_value"} + }, + "labels": {}, + "channel": "channel_value", + "conditions": {}, + "etag": "etag_value", + } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 @@ -12280,7 +13399,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -12294,7 +13413,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["trigger"].items(): # pragma: NO COVER + for field, value in request_init["trigger"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -12309,12 +13428,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -12327,15 +13450,15 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_trigger(request) @@ -12349,15 +13472,23 @@ def test_create_trigger_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) + ) client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_create_trigger") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_create_trigger_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_create_trigger") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.EventarcRestInterceptor, "post_create_trigger" + ) as post, + mock.patch.object( + transports.EventarcRestInterceptor, 
"post_create_trigger_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.EventarcRestInterceptor, "pre_create_trigger" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -12376,7 +13507,7 @@ def test_create_trigger_rest_interceptors(null_interceptor): req.return_value.content = return_value request = eventarc.CreateTriggerRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -12384,7 +13515,13 @@ def test_create_trigger_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.create_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.create_trigger( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -12393,18 +13530,22 @@ def test_create_trigger_rest_interceptors(null_interceptor): def test_update_trigger_rest_bad_request(request_type=eventarc.UpdateTriggerRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} + request_init = { + "trigger": {"name": "projects/sample1/locations/sample2/triggers/sample3"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -12413,19 +13554,59 @@ def test_update_trigger_rest_bad_request(request_type=eventarc.UpdateTriggerRequ client.update_trigger(request) -@pytest.mark.parametrize("request_type", [ - eventarc.UpdateTriggerRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + eventarc.UpdateTriggerRequest, + dict, + ], +) def test_update_trigger_rest_call_success(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} - request_init["trigger"] = {'name': 'projects/sample1/locations/sample2/triggers/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value', 'operator': 'operator_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}, 'cloud_function': 'cloud_function_value', 'gke': {'cluster': 'cluster_value', 'location': 'location_value', 'namespace': 'namespace_value', 'service': 'service_value', 'path': 'path_value'}, 'workflow': 'workflow_value'}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'channel': 'channel_value', 'conditions': {}, 'etag': 'etag_value'} + request_init = { + "trigger": 
{"name": "projects/sample1/locations/sample2/triggers/sample3"} + } + request_init["trigger"] = { + "name": "projects/sample1/locations/sample2/triggers/sample3", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "event_filters": [ + { + "attribute": "attribute_value", + "value": "value_value", + "operator": "operator_value", + } + ], + "service_account": "service_account_value", + "destination": { + "cloud_run": { + "service": "service_value", + "path": "path_value", + "region": "region_value", + }, + "cloud_function": "cloud_function_value", + "gke": { + "cluster": "cluster_value", + "location": "location_value", + "namespace": "namespace_value", + "service": "service_value", + "path": "path_value", + }, + "workflow": "workflow_value", + }, + "transport": { + "pubsub": {"topic": "topic_value", "subscription": "subscription_value"} + }, + "labels": {}, + "channel": "channel_value", + "conditions": {}, + "etag": "etag_value", + } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 @@ -12445,7 +13626,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -12459,7 +13640,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["trigger"].items(): # pragma: NO COVER + for field, value in request_init["trigger"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -12474,12 +13655,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -12492,15 +13677,15 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_trigger(request) @@ -12514,15 +13699,23 @@ def test_update_trigger_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) + ) client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_update_trigger") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_update_trigger_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_update_trigger") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.EventarcRestInterceptor, "post_update_trigger" + ) as post, + mock.patch.object( + transports.EventarcRestInterceptor, 
"post_update_trigger_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.EventarcRestInterceptor, "pre_update_trigger" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -12541,7 +13734,7 @@ def test_update_trigger_rest_interceptors(null_interceptor): req.return_value.content = return_value request = eventarc.UpdateTriggerRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -12549,7 +13742,13 @@ def test_update_trigger_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.update_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.update_trigger( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -12558,18 +13757,20 @@ def test_update_trigger_rest_interceptors(null_interceptor): def test_delete_trigger_rest_bad_request(request_type=eventarc.DeleteTriggerRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/triggers/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -12578,30 +13779,32 @@ def test_delete_trigger_rest_bad_request(request_type=eventarc.DeleteTriggerRequ client.delete_trigger(request) -@pytest.mark.parametrize("request_type", [ - eventarc.DeleteTriggerRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + eventarc.DeleteTriggerRequest, + dict, + ], +) def test_delete_trigger_rest_call_success(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/triggers/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_trigger(request) @@ -12615,15 +13818,23 @@ def test_delete_trigger_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) + ) client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_delete_trigger") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_delete_trigger_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_trigger") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.EventarcRestInterceptor, "post_delete_trigger" + ) as post, + mock.patch.object( + transports.EventarcRestInterceptor, "post_delete_trigger_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.EventarcRestInterceptor, "pre_delete_trigger" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() 
post_with_metadata.assert_not_called() @@ -12642,7 +13853,7 @@ def test_delete_trigger_rest_interceptors(null_interceptor): req.return_value.content = return_value request = eventarc.DeleteTriggerRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -12650,7 +13861,13 @@ def test_delete_trigger_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.delete_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.delete_trigger( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -12659,18 +13876,20 @@ def test_delete_trigger_rest_interceptors(null_interceptor): def test_get_channel_rest_bad_request(request_type=eventarc.GetChannelRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/channels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -12679,31 +13898,33 @@ def test_get_channel_rest_bad_request(request_type=eventarc.GetChannelRequest): client.get_channel(request) -@pytest.mark.parametrize("request_type", [ - eventarc.GetChannelRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + eventarc.GetChannelRequest, + dict, + ], +) def test_get_channel_rest_call_success(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/channels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = channel.Channel( - name='name_value', - uid='uid_value', - provider='provider_value', - state=channel.Channel.State.PENDING, - activation_token='activation_token_value', - crypto_key_name='crypto_key_name_value', - pubsub_topic='pubsub_topic_value', + name="name_value", + uid="uid_value", + provider="provider_value", + state=channel.Channel.State.PENDING, + activation_token="activation_token_value", + crypto_key_name="crypto_key_name_value", + pubsub_topic="pubsub_topic_value", ) # Wrap the value into a proper Response obj @@ -12713,19 +13934,19 @@ def test_get_channel_rest_call_success(request_type): # Convert return value to protobuf type return_value = channel.Channel.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_channel(request) # Establish that the response is the type that we expect. 
assert isinstance(response, channel.Channel) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.provider == 'provider_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.provider == "provider_value" assert response.state == channel.Channel.State.PENDING - assert response.activation_token == 'activation_token_value' - assert response.crypto_key_name == 'crypto_key_name_value' + assert response.activation_token == "activation_token_value" + assert response.crypto_key_name == "crypto_key_name_value" @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -12733,14 +13954,20 @@ def test_get_channel_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) + ) client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_get_channel") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_get_channel_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_get_channel") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.EventarcRestInterceptor, "post_get_channel" + ) as post, + mock.patch.object( + transports.EventarcRestInterceptor, "post_get_channel_with_metadata" + ) as post_with_metadata, + mock.patch.object(transports.EventarcRestInterceptor, "pre_get_channel") as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -12759,7 +13986,7 @@ def 
test_get_channel_rest_interceptors(null_interceptor): req.return_value.content = return_value request = eventarc.GetChannelRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -12767,7 +13994,13 @@ def test_get_channel_rest_interceptors(null_interceptor): post.return_value = channel.Channel() post_with_metadata.return_value = channel.Channel(), metadata - client.get_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_channel( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -12776,18 +14009,20 @@ def test_get_channel_rest_interceptors(null_interceptor): def test_list_channels_rest_bad_request(request_type=eventarc.ListChannelsRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -12796,26 +14031,28 @@ def test_list_channels_rest_bad_request(request_type=eventarc.ListChannelsReques client.list_channels(request) -@pytest.mark.parametrize("request_type", [ - eventarc.ListChannelsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + eventarc.ListChannelsRequest, + dict, + ], +) def test_list_channels_rest_call_success(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = eventarc.ListChannelsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -12825,15 +14062,15 @@ def test_list_channels_rest_call_success(request_type): # Convert return value to protobuf type return_value = eventarc.ListChannelsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_channels(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListChannelsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -12841,14 +14078,22 @@ def test_list_channels_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) + ) client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_list_channels") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_list_channels_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_list_channels") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as 
req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.EventarcRestInterceptor, "post_list_channels" + ) as post, + mock.patch.object( + transports.EventarcRestInterceptor, "post_list_channels_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.EventarcRestInterceptor, "pre_list_channels" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -12863,11 +14108,13 @@ def test_list_channels_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = eventarc.ListChannelsResponse.to_json(eventarc.ListChannelsResponse()) + return_value = eventarc.ListChannelsResponse.to_json( + eventarc.ListChannelsResponse() + ) req.return_value.content = return_value request = eventarc.ListChannelsRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -12875,7 +14122,13 @@ def test_list_channels_rest_interceptors(null_interceptor): post.return_value = eventarc.ListChannelsResponse() post_with_metadata.return_value = eventarc.ListChannelsResponse(), metadata - client.list_channels(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_channels( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -12884,18 +14137,20 @@ def test_list_channels_rest_interceptors(null_interceptor): def test_create_channel_rest_bad_request(request_type=eventarc.CreateChannelRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} 
request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -12904,19 +14159,31 @@ def test_create_channel_rest_bad_request(request_type=eventarc.CreateChannelRequ client.create_channel(request) -@pytest.mark.parametrize("request_type", [ - eventarc.CreateChannelRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + eventarc.CreateChannelRequest, + dict, + ], +) def test_create_channel_rest_call_success(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["channel"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'provider': 'provider_value', 'pubsub_topic': 'pubsub_topic_value', 'state': 1, 'activation_token': 'activation_token_value', 'crypto_key_name': 'crypto_key_name_value'} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["channel"] = { + "name": "name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "provider": "provider_value", + "pubsub_topic": "pubsub_topic_value", + "state": 1, + "activation_token": "activation_token_value", + "crypto_key_name": "crypto_key_name_value", + } # The version of a generated dependency at test runtime may differ from 
the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 @@ -12936,7 +14203,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -12950,7 +14217,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["channel"].items(): # pragma: NO COVER + for field, value in request_init["channel"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -12965,12 +14232,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -12983,15 +14254,15 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_channel(request) @@ -13005,15 +14276,23 @@ def test_create_channel_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) + ) client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_create_channel") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_create_channel_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_create_channel") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.EventarcRestInterceptor, "post_create_channel" + ) as post, + mock.patch.object( + transports.EventarcRestInterceptor, 
"post_create_channel_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.EventarcRestInterceptor, "pre_create_channel" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -13032,7 +14311,7 @@ def test_create_channel_rest_interceptors(null_interceptor): req.return_value.content = return_value request = eventarc.CreateChannelRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -13040,7 +14319,13 @@ def test_create_channel_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.create_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.create_channel( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -13049,18 +14334,22 @@ def test_create_channel_rest_interceptors(null_interceptor): def test_update_channel_rest_bad_request(request_type=eventarc.UpdateChannelRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'channel': {'name': 'projects/sample1/locations/sample2/channels/sample3'}} + request_init = { + "channel": {"name": "projects/sample1/locations/sample2/channels/sample3"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -13069,19 +14358,33 @@ def test_update_channel_rest_bad_request(request_type=eventarc.UpdateChannelRequ client.update_channel(request) -@pytest.mark.parametrize("request_type", [ - eventarc.UpdateChannelRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + eventarc.UpdateChannelRequest, + dict, + ], +) def test_update_channel_rest_call_success(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'channel': {'name': 'projects/sample1/locations/sample2/channels/sample3'}} - request_init["channel"] = {'name': 'projects/sample1/locations/sample2/channels/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'provider': 'provider_value', 'pubsub_topic': 'pubsub_topic_value', 'state': 1, 'activation_token': 'activation_token_value', 'crypto_key_name': 'crypto_key_name_value'} + request_init = { + "channel": {"name": "projects/sample1/locations/sample2/channels/sample3"} + } + request_init["channel"] = { + "name": "projects/sample1/locations/sample2/channels/sample3", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "provider": "provider_value", + "pubsub_topic": "pubsub_topic_value", + "state": 1, + "activation_token": "activation_token_value", + "crypto_key_name": "crypto_key_name_value", + } # The version of a generated dependency at 
test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 @@ -13101,7 +14404,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -13115,7 +14418,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["channel"].items(): # pragma: NO COVER + for field, value in request_init["channel"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -13130,12 +14433,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -13148,15 +14455,15 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_channel(request) @@ -13170,15 +14477,23 @@ def test_update_channel_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) + ) client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_update_channel") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_update_channel_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_update_channel") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.EventarcRestInterceptor, "post_update_channel" + ) as post, + mock.patch.object( + transports.EventarcRestInterceptor, 
"post_update_channel_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.EventarcRestInterceptor, "pre_update_channel" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -13197,7 +14512,7 @@ def test_update_channel_rest_interceptors(null_interceptor): req.return_value.content = return_value request = eventarc.UpdateChannelRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -13205,7 +14520,13 @@ def test_update_channel_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.update_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.update_channel( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -13214,18 +14535,20 @@ def test_update_channel_rest_interceptors(null_interceptor): def test_delete_channel_rest_bad_request(request_type=eventarc.DeleteChannelRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/channels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -13234,30 +14557,32 @@ def test_delete_channel_rest_bad_request(request_type=eventarc.DeleteChannelRequ client.delete_channel(request) -@pytest.mark.parametrize("request_type", [ - eventarc.DeleteChannelRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + eventarc.DeleteChannelRequest, + dict, + ], +) def test_delete_channel_rest_call_success(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/channels/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_channel(request) @@ -13271,15 +14596,23 @@ def test_delete_channel_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) + ) client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_delete_channel") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_delete_channel_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_channel") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.EventarcRestInterceptor, "post_delete_channel" + ) as post, + mock.patch.object( + transports.EventarcRestInterceptor, "post_delete_channel_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.EventarcRestInterceptor, "pre_delete_channel" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() 
post_with_metadata.assert_not_called() @@ -13298,7 +14631,7 @@ def test_delete_channel_rest_interceptors(null_interceptor): req.return_value.content = return_value request = eventarc.DeleteChannelRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -13306,7 +14639,13 @@ def test_delete_channel_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.delete_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.delete_channel( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -13315,18 +14654,20 @@ def test_delete_channel_rest_interceptors(null_interceptor): def test_get_provider_rest_bad_request(request_type=eventarc.GetProviderRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/providers/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/providers/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -13335,26 +14676,28 @@ def test_get_provider_rest_bad_request(request_type=eventarc.GetProviderRequest) client.get_provider(request) -@pytest.mark.parametrize("request_type", [ - eventarc.GetProviderRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + eventarc.GetProviderRequest, + dict, + ], +) def test_get_provider_rest_call_success(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/providers/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/providers/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = discovery.Provider( - name='name_value', - display_name='display_name_value', + name="name_value", + display_name="display_name_value", ) # Wrap the value into a proper Response obj @@ -13364,15 +14707,15 @@ def test_get_provider_rest_call_success(request_type): # Convert return value to protobuf type return_value = discovery.Provider.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_provider(request) # Establish that the response is the type that we expect. assert isinstance(response, discovery.Provider) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' + assert response.name == "name_value" + assert response.display_name == "display_name_value" @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -13380,14 +14723,22 @@ def test_get_provider_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) + ) client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_get_provider") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_get_provider_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_get_provider") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + 
transports.EventarcRestInterceptor, "post_get_provider" + ) as post, + mock.patch.object( + transports.EventarcRestInterceptor, "post_get_provider_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.EventarcRestInterceptor, "pre_get_provider" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -13406,7 +14757,7 @@ def test_get_provider_rest_interceptors(null_interceptor): req.return_value.content = return_value request = eventarc.GetProviderRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -13414,7 +14765,13 @@ def test_get_provider_rest_interceptors(null_interceptor): post.return_value = discovery.Provider() post_with_metadata.return_value = discovery.Provider(), metadata - client.get_provider(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_provider( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -13423,18 +14780,20 @@ def test_get_provider_rest_interceptors(null_interceptor): def test_list_providers_rest_bad_request(request_type=eventarc.ListProvidersRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -13443,26 +14802,28 @@ def test_list_providers_rest_bad_request(request_type=eventarc.ListProvidersRequ client.list_providers(request) -@pytest.mark.parametrize("request_type", [ - eventarc.ListProvidersRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + eventarc.ListProvidersRequest, + dict, + ], +) def test_list_providers_rest_call_success(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = eventarc.ListProvidersResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -13472,15 +14833,15 @@ def test_list_providers_rest_call_success(request_type): # Convert return value to protobuf type return_value = eventarc.ListProvidersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_providers(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListProvidersPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -13488,14 +14849,22 @@ def test_list_providers_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) + ) client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_list_providers") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_list_providers_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_list_providers") as pre: + with ( + mock.patch.object(type(client.transport._session), 
"request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.EventarcRestInterceptor, "post_list_providers" + ) as post, + mock.patch.object( + transports.EventarcRestInterceptor, "post_list_providers_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.EventarcRestInterceptor, "pre_list_providers" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -13510,11 +14879,13 @@ def test_list_providers_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = eventarc.ListProvidersResponse.to_json(eventarc.ListProvidersResponse()) + return_value = eventarc.ListProvidersResponse.to_json( + eventarc.ListProvidersResponse() + ) req.return_value.content = return_value request = eventarc.ListProvidersRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -13522,27 +14893,39 @@ def test_list_providers_rest_interceptors(null_interceptor): post.return_value = eventarc.ListProvidersResponse() post_with_metadata.return_value = eventarc.ListProvidersResponse(), metadata - client.list_providers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_providers( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_get_channel_connection_rest_bad_request(request_type=eventarc.GetChannelConnectionRequest): +def test_get_channel_connection_rest_bad_request( + request_type=eventarc.GetChannelConnectionRequest, +): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 
'projects/sample1/locations/sample2/channelConnections/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/channelConnections/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -13551,28 +14934,32 @@ def test_get_channel_connection_rest_bad_request(request_type=eventarc.GetChanne client.get_channel_connection(request) -@pytest.mark.parametrize("request_type", [ - eventarc.GetChannelConnectionRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + eventarc.GetChannelConnectionRequest, + dict, + ], +) def test_get_channel_connection_rest_call_success(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/channelConnections/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = channel_connection.ChannelConnection( - name='name_value', - uid='uid_value', - channel='channel_value', - activation_token='activation_token_value', + name="name_value", + uid="uid_value", + channel="channel_value", + activation_token="activation_token_value", ) # Wrap the value into a proper Response obj @@ -13582,17 +14969,17 @@ def test_get_channel_connection_rest_call_success(request_type): # Convert return value to protobuf type return_value = channel_connection.ChannelConnection.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_channel_connection(request) # Establish that the response is the type that we expect. assert isinstance(response, channel_connection.ChannelConnection) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.channel == 'channel_value' - assert response.activation_token == 'activation_token_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.channel == "channel_value" + assert response.activation_token == "activation_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -13600,18 +14987,29 @@ def test_get_channel_connection_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) + ) client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_get_channel_connection") as post, \ - 
mock.patch.object(transports.EventarcRestInterceptor, "post_get_channel_connection_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_get_channel_connection") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.EventarcRestInterceptor, "post_get_channel_connection" + ) as post, + mock.patch.object( + transports.EventarcRestInterceptor, + "post_get_channel_connection_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.EventarcRestInterceptor, "pre_get_channel_connection" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = eventarc.GetChannelConnectionRequest.pb(eventarc.GetChannelConnectionRequest()) + pb_message = eventarc.GetChannelConnectionRequest.pb( + eventarc.GetChannelConnectionRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -13622,39 +15020,54 @@ def test_get_channel_connection_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = channel_connection.ChannelConnection.to_json(channel_connection.ChannelConnection()) + return_value = channel_connection.ChannelConnection.to_json( + channel_connection.ChannelConnection() + ) req.return_value.content = return_value request = eventarc.GetChannelConnectionRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = channel_connection.ChannelConnection() - post_with_metadata.return_value = channel_connection.ChannelConnection(), metadata + post_with_metadata.return_value = ( + channel_connection.ChannelConnection(), + metadata, + ) - client.get_channel_connection(request, metadata=[("key", "val"), 
("cephalopod", "squid"),]) + client.get_channel_connection( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_list_channel_connections_rest_bad_request(request_type=eventarc.ListChannelConnectionsRequest): +def test_list_channel_connections_rest_bad_request( + request_type=eventarc.ListChannelConnectionsRequest, +): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -13663,26 +15076,28 @@ def test_list_channel_connections_rest_bad_request(request_type=eventarc.ListCha client.list_channel_connections(request) -@pytest.mark.parametrize("request_type", [ - eventarc.ListChannelConnectionsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + eventarc.ListChannelConnectionsRequest, + dict, + ], +) def test_list_channel_connections_rest_call_success(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - 
request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = eventarc.ListChannelConnectionsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -13692,15 +15107,15 @@ def test_list_channel_connections_rest_call_success(request_type): # Convert return value to protobuf type return_value = eventarc.ListChannelConnectionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_channel_connections(request) # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListChannelConnectionsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -13708,18 +15123,29 @@ def test_list_channel_connections_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) + ) client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_list_channel_connections") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_list_channel_connections_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_list_channel_connections") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.EventarcRestInterceptor, "post_list_channel_connections" + ) as post, + mock.patch.object( + transports.EventarcRestInterceptor, + "post_list_channel_connections_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.EventarcRestInterceptor, "pre_list_channel_connections" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = eventarc.ListChannelConnectionsRequest.pb(eventarc.ListChannelConnectionsRequest()) + pb_message = eventarc.ListChannelConnectionsRequest.pb( + eventarc.ListChannelConnectionsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ 
-13730,39 +15156,54 @@ def test_list_channel_connections_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = eventarc.ListChannelConnectionsResponse.to_json(eventarc.ListChannelConnectionsResponse()) + return_value = eventarc.ListChannelConnectionsResponse.to_json( + eventarc.ListChannelConnectionsResponse() + ) req.return_value.content = return_value request = eventarc.ListChannelConnectionsRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = eventarc.ListChannelConnectionsResponse() - post_with_metadata.return_value = eventarc.ListChannelConnectionsResponse(), metadata + post_with_metadata.return_value = ( + eventarc.ListChannelConnectionsResponse(), + metadata, + ) - client.list_channel_connections(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_channel_connections( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_create_channel_connection_rest_bad_request(request_type=eventarc.CreateChannelConnectionRequest): +def test_create_channel_connection_rest_bad_request( + request_type=eventarc.CreateChannelConnectionRequest, +): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -13771,25 +15212,36 @@ def test_create_channel_connection_rest_bad_request(request_type=eventarc.Create client.create_channel_connection(request) -@pytest.mark.parametrize("request_type", [ - eventarc.CreateChannelConnectionRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + eventarc.CreateChannelConnectionRequest, + dict, + ], +) def test_create_channel_connection_rest_call_success(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["channel_connection"] = {'name': 'name_value', 'uid': 'uid_value', 'channel': 'channel_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'activation_token': 'activation_token_value'} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["channel_connection"] = { + "name": "name_value", + "uid": "uid_value", + "channel": "channel_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "activation_token": "activation_token_value", + } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = eventarc.CreateChannelConnectionRequest.meta.fields["channel_connection"] + test_field = eventarc.CreateChannelConnectionRequest.meta.fields[ + "channel_connection" + ] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -13803,7 +15255,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -13817,7 +15269,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["channel_connection"].items(): # pragma: NO COVER + for field, value in request_init["channel_connection"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -13832,12 +15284,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field 
= subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -13850,15 +15306,15 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_channel_connection(request) @@ -13872,19 +15328,30 @@ def test_create_channel_connection_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) + ) client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_create_channel_connection") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_create_channel_connection_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_create_channel_connection") as pre: + with ( + 
mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.EventarcRestInterceptor, "post_create_channel_connection" + ) as post, + mock.patch.object( + transports.EventarcRestInterceptor, + "post_create_channel_connection_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.EventarcRestInterceptor, "pre_create_channel_connection" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = eventarc.CreateChannelConnectionRequest.pb(eventarc.CreateChannelConnectionRequest()) + pb_message = eventarc.CreateChannelConnectionRequest.pb( + eventarc.CreateChannelConnectionRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -13899,7 +15366,7 @@ def test_create_channel_connection_rest_interceptors(null_interceptor): req.return_value.content = return_value request = eventarc.CreateChannelConnectionRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -13907,27 +15374,39 @@ def test_create_channel_connection_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.create_channel_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.create_channel_connection( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_delete_channel_connection_rest_bad_request(request_type=eventarc.DeleteChannelConnectionRequest): +def test_delete_channel_connection_rest_bad_request( + request_type=eventarc.DeleteChannelConnectionRequest, +): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - 
transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/channelConnections/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -13936,30 +15415,34 @@ def test_delete_channel_connection_rest_bad_request(request_type=eventarc.Delete client.delete_channel_connection(request) -@pytest.mark.parametrize("request_type", [ - eventarc.DeleteChannelConnectionRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + eventarc.DeleteChannelConnectionRequest, + dict, + ], +) def test_delete_channel_connection_rest_call_success(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} + request_init = { + "name": "projects/sample1/locations/sample2/channelConnections/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_channel_connection(request) @@ -13973,19 +15456,30 @@ def test_delete_channel_connection_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) + ) client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_delete_channel_connection") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_delete_channel_connection_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_channel_connection") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.EventarcRestInterceptor, "post_delete_channel_connection" + ) as post, + 
mock.patch.object( + transports.EventarcRestInterceptor, + "post_delete_channel_connection_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.EventarcRestInterceptor, "pre_delete_channel_connection" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = eventarc.DeleteChannelConnectionRequest.pb(eventarc.DeleteChannelConnectionRequest()) + pb_message = eventarc.DeleteChannelConnectionRequest.pb( + eventarc.DeleteChannelConnectionRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -14000,7 +15494,7 @@ def test_delete_channel_connection_rest_interceptors(null_interceptor): req.return_value.content = return_value request = eventarc.DeleteChannelConnectionRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -14008,27 +15502,37 @@ def test_delete_channel_connection_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.delete_channel_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.delete_channel_connection( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_get_google_channel_config_rest_bad_request(request_type=eventarc.GetGoogleChannelConfigRequest): +def test_get_google_channel_config_rest_bad_request( + request_type=eventarc.GetGoogleChannelConfigRequest, +): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/googleChannelConfig'} + request_init = {"name": "projects/sample1/locations/sample2/googleChannelConfig"} request = 
request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -14037,26 +15541,28 @@ def test_get_google_channel_config_rest_bad_request(request_type=eventarc.GetGoo client.get_google_channel_config(request) -@pytest.mark.parametrize("request_type", [ - eventarc.GetGoogleChannelConfigRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + eventarc.GetGoogleChannelConfigRequest, + dict, + ], +) def test_get_google_channel_config_rest_call_success(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/googleChannelConfig'} + request_init = {"name": "projects/sample1/locations/sample2/googleChannelConfig"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = google_channel_config.GoogleChannelConfig( - name='name_value', - crypto_key_name='crypto_key_name_value', + name="name_value", + crypto_key_name="crypto_key_name_value", ) # Wrap the value into a proper Response obj @@ -14066,15 +15572,15 @@ def test_get_google_channel_config_rest_call_success(request_type): # Convert return value to protobuf type return_value = google_channel_config.GoogleChannelConfig.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_google_channel_config(request) # Establish that the response is the type that we expect. assert isinstance(response, google_channel_config.GoogleChannelConfig) - assert response.name == 'name_value' - assert response.crypto_key_name == 'crypto_key_name_value' + assert response.name == "name_value" + assert response.crypto_key_name == "crypto_key_name_value" @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -14082,18 +15588,29 @@ def test_get_google_channel_config_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) + ) client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_get_google_channel_config") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_get_google_channel_config_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_get_google_channel_config") as pre: + with ( + 
mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.EventarcRestInterceptor, "post_get_google_channel_config" + ) as post, + mock.patch.object( + transports.EventarcRestInterceptor, + "post_get_google_channel_config_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.EventarcRestInterceptor, "pre_get_google_channel_config" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = eventarc.GetGoogleChannelConfigRequest.pb(eventarc.GetGoogleChannelConfigRequest()) + pb_message = eventarc.GetGoogleChannelConfigRequest.pb( + eventarc.GetGoogleChannelConfigRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -14104,39 +15621,58 @@ def test_get_google_channel_config_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = google_channel_config.GoogleChannelConfig.to_json(google_channel_config.GoogleChannelConfig()) + return_value = google_channel_config.GoogleChannelConfig.to_json( + google_channel_config.GoogleChannelConfig() + ) req.return_value.content = return_value request = eventarc.GetGoogleChannelConfigRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = google_channel_config.GoogleChannelConfig() - post_with_metadata.return_value = google_channel_config.GoogleChannelConfig(), metadata + post_with_metadata.return_value = ( + google_channel_config.GoogleChannelConfig(), + metadata, + ) - client.get_google_channel_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_google_channel_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() 
post.assert_called_once() post_with_metadata.assert_called_once() -def test_update_google_channel_config_rest_bad_request(request_type=eventarc.UpdateGoogleChannelConfigRequest): +def test_update_google_channel_config_rest_bad_request( + request_type=eventarc.UpdateGoogleChannelConfigRequest, +): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'google_channel_config': {'name': 'projects/sample1/locations/sample2/googleChannelConfig'}} + request_init = { + "google_channel_config": { + "name": "projects/sample1/locations/sample2/googleChannelConfig" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -14145,25 +15681,37 @@ def test_update_google_channel_config_rest_bad_request(request_type=eventarc.Upd client.update_google_channel_config(request) -@pytest.mark.parametrize("request_type", [ - eventarc.UpdateGoogleChannelConfigRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + eventarc.UpdateGoogleChannelConfigRequest, + dict, + ], +) def test_update_google_channel_config_rest_call_success(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = 
{'google_channel_config': {'name': 'projects/sample1/locations/sample2/googleChannelConfig'}} - request_init["google_channel_config"] = {'name': 'projects/sample1/locations/sample2/googleChannelConfig', 'update_time': {'seconds': 751, 'nanos': 543}, 'crypto_key_name': 'crypto_key_name_value'} + request_init = { + "google_channel_config": { + "name": "projects/sample1/locations/sample2/googleChannelConfig" + } + } + request_init["google_channel_config"] = { + "name": "projects/sample1/locations/sample2/googleChannelConfig", + "update_time": {"seconds": 751, "nanos": 543}, + "crypto_key_name": "crypto_key_name_value", + } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = eventarc.UpdateGoogleChannelConfigRequest.meta.fields["google_channel_config"] + test_field = eventarc.UpdateGoogleChannelConfigRequest.meta.fields[ + "google_channel_config" + ] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -14177,7 +15725,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -14191,7 +15739,9 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["google_channel_config"].items(): # pragma: NO COVER + for field, value in request_init[ + "google_channel_config" + 
].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -14206,12 +15756,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -14224,11 +15778,11 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = gce_google_channel_config.GoogleChannelConfig( - name='name_value', - crypto_key_name='crypto_key_name_value', + name="name_value", + crypto_key_name="crypto_key_name_value", ) # Wrap the value into a proper Response obj @@ -14238,15 +15792,15 @@ def get_message_fields(field): # Convert return value to protobuf type return_value = gce_google_channel_config.GoogleChannelConfig.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_google_channel_config(request) # Establish that the response is the type that we expect. assert isinstance(response, gce_google_channel_config.GoogleChannelConfig) - assert response.name == 'name_value' - assert response.crypto_key_name == 'crypto_key_name_value' + assert response.name == "name_value" + assert response.crypto_key_name == "crypto_key_name_value" @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -14254,18 +15808,29 @@ def test_update_google_channel_config_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) + ) client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_update_google_channel_config") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_update_google_channel_config_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_update_google_channel_config") as pre: + with ( + 
mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.EventarcRestInterceptor, "post_update_google_channel_config" + ) as post, + mock.patch.object( + transports.EventarcRestInterceptor, + "post_update_google_channel_config_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.EventarcRestInterceptor, "pre_update_google_channel_config" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = eventarc.UpdateGoogleChannelConfigRequest.pb(eventarc.UpdateGoogleChannelConfigRequest()) + pb_message = eventarc.UpdateGoogleChannelConfigRequest.pb( + eventarc.UpdateGoogleChannelConfigRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -14276,19 +15841,30 @@ def test_update_google_channel_config_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gce_google_channel_config.GoogleChannelConfig.to_json(gce_google_channel_config.GoogleChannelConfig()) + return_value = gce_google_channel_config.GoogleChannelConfig.to_json( + gce_google_channel_config.GoogleChannelConfig() + ) req.return_value.content = return_value request = eventarc.UpdateGoogleChannelConfigRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata post.return_value = gce_google_channel_config.GoogleChannelConfig() - post_with_metadata.return_value = gce_google_channel_config.GoogleChannelConfig(), metadata + post_with_metadata.return_value = ( + gce_google_channel_config.GoogleChannelConfig(), + metadata, + ) - client.update_google_channel_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.update_google_channel_config( + request, + metadata=[ + ("key", "val"), + 
("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -14301,13 +15877,18 @@ def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationReq transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -14316,20 +15897,23 @@ def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationReq client.get_location(request) -@pytest.mark.parametrize("request_type", [ - locations_pb2.GetLocationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) def test_get_location_rest(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2'} + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = locations_pb2.Location() @@ -14337,7 +15921,7 @@ def test_get_location_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -14348,19 +15932,24 @@ def test_get_location_rest(request_type): assert isinstance(response, locations_pb2.Location) -def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): +def test_list_locations_rest_bad_request( + request_type=locations_pb2.ListLocationsRequest, +): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) + request = json_format.ParseDict({"name": "projects/sample1"}, request) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -14369,20 +15958,23 @@ def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocation client.list_locations(request) -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) def test_list_locations_rest(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1'} + request_init = {"name": "projects/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = locations_pb2.ListLocationsResponse() @@ -14390,7 +15982,7 @@ def test_list_locations_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -14401,19 +15993,26 @@ def test_list_locations_rest(request_type): assert isinstance(response, locations_pb2.ListLocationsResponse) -def test_get_iam_policy_rest_bad_request(request_type=iam_policy_pb2.GetIamPolicyRequest): +def test_get_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.GetIamPolicyRequest, +): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/triggers/sample3'}, request) + request = json_format.ParseDict( + {"resource": "projects/sample1/locations/sample2/triggers/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -14422,20 +16021,23 @@ def test_get_iam_policy_rest_bad_request(request_type=iam_policy_pb2.GetIamPolic client.get_iam_policy(request) -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) def test_get_iam_policy_rest(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} + request_init = {"resource": "projects/sample1/locations/sample2/triggers/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = policy_pb2.Policy() @@ -14443,7 +16045,7 @@ def test_get_iam_policy_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -14454,19 +16056,26 @@ def test_get_iam_policy_rest(request_type): assert isinstance(response, policy_pb2.Policy) -def test_set_iam_policy_rest_bad_request(request_type=iam_policy_pb2.SetIamPolicyRequest): +def test_set_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.SetIamPolicyRequest, +): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/triggers/sample3'}, request) + request = json_format.ParseDict( + {"resource": "projects/sample1/locations/sample2/triggers/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -14475,20 +16084,23 @@ def test_set_iam_policy_rest_bad_request(request_type=iam_policy_pb2.SetIamPolic client.set_iam_policy(request) -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) def test_set_iam_policy_rest(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} + request_init = {"resource": "projects/sample1/locations/sample2/triggers/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = policy_pb2.Policy() @@ -14496,7 +16108,7 @@ def test_set_iam_policy_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -14507,19 +16119,26 @@ def test_set_iam_policy_rest(request_type): assert isinstance(response, policy_pb2.Policy) -def test_test_iam_permissions_rest_bad_request(request_type=iam_policy_pb2.TestIamPermissionsRequest): +def test_test_iam_permissions_rest_bad_request( + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/triggers/sample3'}, request) + request = json_format.ParseDict( + {"resource": "projects/sample1/locations/sample2/triggers/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -14528,20 +16147,23 @@ def test_test_iam_permissions_rest_bad_request(request_type=iam_policy_pb2.TestI client.test_iam_permissions(request) -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) def test_test_iam_permissions_rest(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} + request_init = {"resource": "projects/sample1/locations/sample2/triggers/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = iam_policy_pb2.TestIamPermissionsResponse() @@ -14549,7 +16171,7 @@ def test_test_iam_permissions_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -14560,19 +16182,26 @@ def test_test_iam_permissions_rest(request_type): assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) -def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): +def test_cancel_operation_rest_bad_request( + request_type=operations_pb2.CancelOperationRequest, +): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -14581,28 +16210,31 @@ def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOpe client.cancel_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) def test_cancel_operation_rest(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -14613,19 +16245,26 @@ def test_cancel_operation_rest(request_type): assert response is None -def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): +def test_delete_operation_rest_bad_request( + request_type=operations_pb2.DeleteOperationRequest, +): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -14634,28 +16273,31 @@ def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOpe client.delete_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) def test_delete_operation_rest(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -14666,19 +16308,26 @@ def test_delete_operation_rest(request_type): assert response is None -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -14687,20 +16336,23 @@ def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperation client.get_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) def test_get_operation_rest(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = operations_pb2.Operation() @@ -14708,7 +16360,7 @@ def test_get_operation_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -14719,19 +16371,26 @@ def test_get_operation_rest(request_type): assert isinstance(response, operations_pb2.Operation) -def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -14740,20 +16399,23 @@ def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperat client.list_operations(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) def test_list_operations_rest(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2'} + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = operations_pb2.ListOperationsResponse() @@ -14761,7 +16423,7 @@ def test_list_operations_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -14771,10 +16433,10 @@ def test_list_operations_rest(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, operations_pb2.ListOperationsResponse) + def test_initialize_client_w_rest(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) assert client is not None @@ -14788,9 +16450,7 @@ def test_get_trigger_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_trigger), - '__call__') as call: + with mock.patch.object(type(client.transport.get_trigger), "__call__") as call: client.get_trigger(request=None) # Establish that the underlying stub method was called. @@ -14810,9 +16470,7 @@ def test_list_triggers_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_triggers), - '__call__') as call: + with mock.patch.object(type(client.transport.list_triggers), "__call__") as call: client.list_triggers(request=None) # Establish that the underlying stub method was called. @@ -14832,9 +16490,7 @@ def test_create_trigger_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_trigger), - '__call__') as call: + with mock.patch.object(type(client.transport.create_trigger), "__call__") as call: client.create_trigger(request=None) # Establish that the underlying stub method was called. @@ -14854,9 +16510,7 @@ def test_update_trigger_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_trigger), - '__call__') as call: + with mock.patch.object(type(client.transport.update_trigger), "__call__") as call: client.update_trigger(request=None) # Establish that the underlying stub method was called. @@ -14876,9 +16530,7 @@ def test_delete_trigger_empty_call_rest(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_trigger), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_trigger), "__call__") as call: client.delete_trigger(request=None) # Establish that the underlying stub method was called. @@ -14898,9 +16550,7 @@ def test_get_channel_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_channel), - '__call__') as call: + with mock.patch.object(type(client.transport.get_channel), "__call__") as call: client.get_channel(request=None) # Establish that the underlying stub method was called. @@ -14920,9 +16570,7 @@ def test_list_channels_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_channels), - '__call__') as call: + with mock.patch.object(type(client.transport.list_channels), "__call__") as call: client.list_channels(request=None) # Establish that the underlying stub method was called. @@ -14942,9 +16590,7 @@ def test_create_channel_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_channel_), - '__call__') as call: + with mock.patch.object(type(client.transport.create_channel_), "__call__") as call: client.create_channel(request=None) # Establish that the underlying stub method was called. @@ -14964,9 +16610,7 @@ def test_update_channel_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_channel), - '__call__') as call: + with mock.patch.object(type(client.transport.update_channel), "__call__") as call: client.update_channel(request=None) # Establish that the underlying stub method was called. @@ -14986,9 +16630,7 @@ def test_delete_channel_empty_call_rest(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_channel), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_channel), "__call__") as call: client.delete_channel(request=None) # Establish that the underlying stub method was called. @@ -15008,9 +16650,7 @@ def test_get_provider_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_provider), - '__call__') as call: + with mock.patch.object(type(client.transport.get_provider), "__call__") as call: client.get_provider(request=None) # Establish that the underlying stub method was called. @@ -15030,9 +16670,7 @@ def test_list_providers_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_providers), - '__call__') as call: + with mock.patch.object(type(client.transport.list_providers), "__call__") as call: client.list_providers(request=None) # Establish that the underlying stub method was called. @@ -15053,8 +16691,8 @@ def test_get_channel_connection_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_channel_connection), - '__call__') as call: + type(client.transport.get_channel_connection), "__call__" + ) as call: client.get_channel_connection(request=None) # Establish that the underlying stub method was called. @@ -15075,8 +16713,8 @@ def test_list_channel_connections_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.list_channel_connections), - '__call__') as call: + type(client.transport.list_channel_connections), "__call__" + ) as call: client.list_channel_connections(request=None) # Establish that the underlying stub method was called. @@ -15097,8 +16735,8 @@ def test_create_channel_connection_empty_call_rest(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.create_channel_connection), - '__call__') as call: + type(client.transport.create_channel_connection), "__call__" + ) as call: client.create_channel_connection(request=None) # Establish that the underlying stub method was called. @@ -15119,8 +16757,8 @@ def test_delete_channel_connection_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.delete_channel_connection), - '__call__') as call: + type(client.transport.delete_channel_connection), "__call__" + ) as call: client.delete_channel_connection(request=None) # Establish that the underlying stub method was called. @@ -15141,8 +16779,8 @@ def test_get_google_channel_config_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_google_channel_config), - '__call__') as call: + type(client.transport.get_google_channel_config), "__call__" + ) as call: client.get_google_channel_config(request=None) # Establish that the underlying stub method was called. @@ -15163,8 +16801,8 @@ def test_update_google_channel_config_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_google_channel_config), - '__call__') as call: + type(client.transport.update_google_channel_config), "__call__" + ) as call: client.update_google_channel_config(request=None) # Establish that the underlying stub method was called. @@ -15185,12 +16823,13 @@ def test_eventarc_rest_lro_client(): # Ensure that we have an api-core operations client. assert isinstance( transport.operations_client, -operations_v1.AbstractOperationsClient, + operations_v1.AbstractOperationsClient, ) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client + def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = EventarcClient( @@ -15201,18 +16840,21 @@ def test_transport_grpc_default(): transports.EventarcGrpcTransport, ) + def test_eventarc_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.EventarcTransport( credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" + credentials_file="credentials.json", ) def test_eventarc_base_transport(): # Instantiate the base transport. - with mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport.__init__') as Transport: + with mock.patch( + "google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport.__init__" + ) as Transport: Transport.return_value = None transport = transports.EventarcTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -15221,33 +16863,33 @@ def test_eventarc_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. 
methods = ( - 'get_trigger', - 'list_triggers', - 'create_trigger', - 'update_trigger', - 'delete_trigger', - 'get_channel', - 'list_channels', - 'create_channel_', - 'update_channel', - 'delete_channel', - 'get_provider', - 'list_providers', - 'get_channel_connection', - 'list_channel_connections', - 'create_channel_connection', - 'delete_channel_connection', - 'get_google_channel_config', - 'update_google_channel_config', - 'set_iam_policy', - 'get_iam_policy', - 'test_iam_permissions', - 'get_location', - 'list_locations', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', + "get_trigger", + "list_triggers", + "create_trigger", + "update_trigger", + "delete_trigger", + "get_channel", + "list_channels", + "create_channel_", + "update_channel", + "delete_channel", + "get_provider", + "list_providers", + "get_channel_connection", + "list_channel_connections", + "create_channel_connection", + "delete_channel_connection", + "get_google_channel_config", + "update_google_channel_config", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) for method in methods: with pytest.raises(NotImplementedError): @@ -15263,7 +16905,7 @@ def test_eventarc_base_transport(): # Catch all for all remaining methods and properties remainder = [ - 'kind', + "kind", ] for r in remainder: with pytest.raises(NotImplementedError): @@ -15272,25 +16914,36 @@ def test_eventarc_base_transport(): def test_eventarc_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport._prep_wrapped_messages') as Transport: + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as 
load_creds, + mock.patch( + "google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport._prep_wrapped_messages" + ) as Transport, + ): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.EventarcTransport( credentials_file="credentials.json", quota_project_id="octopus", ) - load_creds.assert_called_once_with("credentials.json", + load_creds.assert_called_once_with( + "credentials.json", scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) def test_eventarc_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport._prep_wrapped_messages') as Transport: + with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch( + "google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport._prep_wrapped_messages" + ) as Transport, + ): Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.EventarcTransport() @@ -15299,14 +16952,12 @@ def test_eventarc_base_transport_with_adc(): def test_eventarc_auth_adc(): # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) EventarcClient() adc.assert_called_once_with( scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id=None, ) @@ -15321,12 +16972,12 @@ def test_eventarc_auth_adc(): def test_eventarc_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) @@ -15340,48 +16991,46 @@ def test_eventarc_transport_auth_adc(transport_class): ], ) def test_eventarc_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) adc.return_value = 
(gdch_mock, None) transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) + gdch_mock.with_gdch_audience.assert_called_once_with(e) @pytest.mark.parametrize( "transport_class,grpc_helpers", [ (transports.EventarcGrpcTransport, grpc_helpers), - (transports.EventarcGrpcAsyncIOTransport, grpc_helpers_async) + (transports.EventarcGrpcAsyncIOTransport, grpc_helpers_async), ], ) def test_eventarc_transport_create_channel(transport_class, grpc_helpers): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: + with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel, + ): creds = ga_credentials.AnonymousCredentials() adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) create_channel.assert_called_with( "eventarc.googleapis.com:443", credentials=creds, credentials_file=None, quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), scopes=["1", "2"], default_host="eventarc.googleapis.com", ssl_credentials=None, @@ -15392,10 +17041,11 @@ def test_eventarc_transport_create_channel(transport_class, grpc_helpers): ) -@pytest.mark.parametrize("transport_class", [transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport]) -def test_eventarc_grpc_transport_client_cert_source_for_mtls( - transport_class -): +@pytest.mark.parametrize( + "transport_class", + [transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport], +) +def 
test_eventarc_grpc_transport_client_cert_source_for_mtls(transport_class): cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. @@ -15404,7 +17054,7 @@ def test_eventarc_grpc_transport_client_cert_source_for_mtls( transport_class( host="squid.clam.whelk", credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds + ssl_channel_credentials=mock_ssl_channel_creds, ) mock_create_channel.assert_called_once_with( "squid.clam.whelk:443", @@ -15425,61 +17075,77 @@ def test_eventarc_grpc_transport_client_cert_source_for_mtls( with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: transport_class( credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback + client_cert_source_for_mtls=client_cert_source_callback, ) expected_cert, expected_key = client_cert_source_callback() mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key + certificate_chain=expected_cert, private_key=expected_key ) + def test_eventarc_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.EventarcRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.EventarcRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) def test_eventarc_host_no_port(transport_name): client = EventarcClient( 
credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='eventarc.googleapis.com'), - transport=transport_name, + client_options=client_options.ClientOptions( + api_endpoint="eventarc.googleapis.com" + ), + transport=transport_name, ) assert client.transport._host == ( - 'eventarc.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://eventarc.googleapis.com' + "eventarc.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://eventarc.googleapis.com" ) -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) def test_eventarc_host_with_port(transport_name): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='eventarc.googleapis.com:8000'), + client_options=client_options.ClientOptions( + api_endpoint="eventarc.googleapis.com:8000" + ), transport=transport_name, ) assert client.transport._host == ( - 'eventarc.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://eventarc.googleapis.com:8000' + "eventarc.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://eventarc.googleapis.com:8000" ) -@pytest.mark.parametrize("transport_name", [ - "rest", -]) + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) def test_eventarc_client_transport_session_collision(transport_name): creds1 = ga_credentials.AnonymousCredentials() creds2 = ga_credentials.AnonymousCredentials() @@ -15545,8 +17211,10 @@ def test_eventarc_client_transport_session_collision(transport_name): session1 = client1.transport.update_google_channel_config._session session2 = client2.transport.update_google_channel_config._session assert session1 != session2 + + def test_eventarc_grpc_transport_channel(): - channel 
= grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.EventarcGrpcTransport( @@ -15559,7 +17227,7 @@ def test_eventarc_grpc_transport_channel(): def test_eventarc_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.EventarcGrpcAsyncIOTransport( @@ -15574,12 +17242,17 @@ def test_eventarc_grpc_asyncio_transport_channel(): # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. @pytest.mark.filterwarnings("ignore::FutureWarning") -@pytest.mark.parametrize("transport_class", [transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport]) -def test_eventarc_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: +@pytest.mark.parametrize( + "transport_class", + [transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport], +) +def test_eventarc_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -15588,7 +17261,7 @@ def test_eventarc_transport_channel_mtls_with_client_cert_source( cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 
'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -15618,17 +17291,20 @@ def test_eventarc_transport_channel_mtls_with_client_cert_source( # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport]) -def test_eventarc_transport_channel_mtls_with_adc( - transport_class -): +@pytest.mark.parametrize( + "transport_class", + [transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport], +) +def test_eventarc_transport_channel_mtls_with_adc(transport_class): mock_ssl_cred = mock.Mock() with mock.patch.multiple( "google.auth.transport.grpc.SslCredentials", __init__=mock.Mock(return_value=None), ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel mock_cred = mock.Mock() @@ -15659,7 +17335,7 @@ def test_eventarc_transport_channel_mtls_with_adc( def test_eventarc_grpc_lro_client(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) transport = client.transport @@ -15676,7 +17352,7 @@ def test_eventarc_grpc_lro_client(): def test_eventarc_grpc_lro_async_client(): client = EventarcAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', + transport="grpc_asyncio", ) transport = client.transport @@ -15694,7 +17370,11 @@ def test_channel_path(): project = "squid" location = "clam" channel = "whelk" - expected = 
"projects/{project}/locations/{location}/channels/{channel}".format(project=project, location=location, channel=channel, ) + expected = "projects/{project}/locations/{location}/channels/{channel}".format( + project=project, + location=location, + channel=channel, + ) actual = EventarcClient.channel_path(project, location, channel) assert expected == actual @@ -15711,12 +17391,19 @@ def test_parse_channel_path(): actual = EventarcClient.parse_channel_path(path) assert expected == actual + def test_channel_connection_path(): project = "cuttlefish" location = "mussel" channel_connection = "winkle" - expected = "projects/{project}/locations/{location}/channelConnections/{channel_connection}".format(project=project, location=location, channel_connection=channel_connection, ) - actual = EventarcClient.channel_connection_path(project, location, channel_connection) + expected = "projects/{project}/locations/{location}/channelConnections/{channel_connection}".format( + project=project, + location=location, + channel_connection=channel_connection, + ) + actual = EventarcClient.channel_connection_path( + project, location, channel_connection + ) assert expected == actual @@ -15732,11 +17419,16 @@ def test_parse_channel_connection_path(): actual = EventarcClient.parse_channel_connection_path(path) assert expected == actual + def test_cloud_function_path(): project = "squid" location = "clam" function = "whelk" - expected = "projects/{project}/locations/{location}/functions/{function}".format(project=project, location=location, function=function, ) + expected = "projects/{project}/locations/{location}/functions/{function}".format( + project=project, + location=location, + function=function, + ) actual = EventarcClient.cloud_function_path(project, location, function) assert expected == actual @@ -15753,12 +17445,18 @@ def test_parse_cloud_function_path(): actual = EventarcClient.parse_cloud_function_path(path) assert expected == actual + def test_crypto_key_path(): project = 
"cuttlefish" location = "mussel" key_ring = "winkle" crypto_key = "nautilus" - expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, ) + expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) actual = EventarcClient.crypto_key_path(project, location, key_ring, crypto_key) assert expected == actual @@ -15776,10 +17474,14 @@ def test_parse_crypto_key_path(): actual = EventarcClient.parse_crypto_key_path(path) assert expected == actual + def test_google_channel_config_path(): project = "whelk" location = "octopus" - expected = "projects/{project}/locations/{location}/googleChannelConfig".format(project=project, location=location, ) + expected = "projects/{project}/locations/{location}/googleChannelConfig".format( + project=project, + location=location, + ) actual = EventarcClient.google_channel_config_path(project, location) assert expected == actual @@ -15795,11 +17497,16 @@ def test_parse_google_channel_config_path(): actual = EventarcClient.parse_google_channel_config_path(path) assert expected == actual + def test_provider_path(): project = "cuttlefish" location = "mussel" provider = "winkle" - expected = "projects/{project}/locations/{location}/providers/{provider}".format(project=project, location=location, provider=provider, ) + expected = "projects/{project}/locations/{location}/providers/{provider}".format( + project=project, + location=location, + provider=provider, + ) actual = EventarcClient.provider_path(project, location, provider) assert expected == actual @@ -15816,6 +17523,7 @@ def test_parse_provider_path(): actual = EventarcClient.parse_provider_path(path) assert expected == actual + def test_service_path(): expected = "*".format() actual = EventarcClient.service_path() @@ -15823,18 
+17531,21 @@ def test_service_path(): def test_parse_service_path(): - expected = { - } + expected = {} path = EventarcClient.service_path(**expected) # Check that the path construction is reversible. actual = EventarcClient.parse_service_path(path) assert expected == actual + def test_service_account_path(): project = "squid" service_account = "clam" - expected = "projects/{project}/serviceAccounts/{service_account}".format(project=project, service_account=service_account, ) + expected = "projects/{project}/serviceAccounts/{service_account}".format( + project=project, + service_account=service_account, + ) actual = EventarcClient.service_account_path(project, service_account) assert expected == actual @@ -15850,11 +17561,16 @@ def test_parse_service_account_path(): actual = EventarcClient.parse_service_account_path(path) assert expected == actual + def test_trigger_path(): project = "oyster" location = "nudibranch" trigger = "cuttlefish" - expected = "projects/{project}/locations/{location}/triggers/{trigger}".format(project=project, location=location, trigger=trigger, ) + expected = "projects/{project}/locations/{location}/triggers/{trigger}".format( + project=project, + location=location, + trigger=trigger, + ) actual = EventarcClient.trigger_path(project, location, trigger) assert expected == actual @@ -15871,11 +17587,16 @@ def test_parse_trigger_path(): actual = EventarcClient.parse_trigger_path(path) assert expected == actual + def test_workflow_path(): project = "scallop" location = "abalone" workflow = "squid" - expected = "projects/{project}/locations/{location}/workflows/{workflow}".format(project=project, location=location, workflow=workflow, ) + expected = "projects/{project}/locations/{location}/workflows/{workflow}".format( + project=project, + location=location, + workflow=workflow, + ) actual = EventarcClient.workflow_path(project, location, workflow) assert expected == actual @@ -15892,9 +17613,12 @@ def test_parse_workflow_path(): actual = 
EventarcClient.parse_workflow_path(path) assert expected == actual + def test_common_billing_account_path(): billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) actual = EventarcClient.common_billing_account_path(billing_account) assert expected == actual @@ -15909,9 +17633,12 @@ def test_parse_common_billing_account_path(): actual = EventarcClient.parse_common_billing_account_path(path) assert expected == actual + def test_common_folder_path(): folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) + expected = "folders/{folder}".format( + folder=folder, + ) actual = EventarcClient.common_folder_path(folder) assert expected == actual @@ -15926,9 +17653,12 @@ def test_parse_common_folder_path(): actual = EventarcClient.parse_common_folder_path(path) assert expected == actual + def test_common_organization_path(): organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = EventarcClient.common_organization_path(organization) assert expected == actual @@ -15943,9 +17673,12 @@ def test_parse_common_organization_path(): actual = EventarcClient.parse_common_organization_path(path) assert expected == actual + def test_common_project_path(): project = "scallop" - expected = "projects/{project}".format(project=project, ) + expected = "projects/{project}".format( + project=project, + ) actual = EventarcClient.common_project_path(project) assert expected == actual @@ -15960,10 +17693,14 @@ def test_parse_common_project_path(): actual = EventarcClient.parse_common_project_path(path) assert expected == actual + def test_common_location_path(): project = "squid" location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, 
location=location, ) + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) actual = EventarcClient.common_location_path(project, location) assert expected == actual @@ -15983,14 +17720,18 @@ def test_parse_common_location_path(): def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() - with mock.patch.object(transports.EventarcTransport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.EventarcTransport, "_prep_wrapped_messages" + ) as prep: client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) - with mock.patch.object(transports.EventarcTransport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.EventarcTransport, "_prep_wrapped_messages" + ) as prep: transport_class = EventarcClient.get_transport_class() transport = transport_class( credentials=ga_credentials.AnonymousCredentials(), @@ -16001,7 +17742,8 @@ def test_client_with_default_client_info(): def test_delete_operation(transport: str = "grpc"): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -16021,10 +17763,12 @@ def test_delete_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. 
assert response is None + @pytest.mark.asyncio async def test_delete_operation_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -16034,9 +17778,7 @@ async def test_delete_operation_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.delete_operation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -16059,7 +17801,7 @@ def test_delete_operation_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None + call.return_value = None client.delete_operation(request) # Establish that the underlying gRPC stub method was called. @@ -16069,7 +17811,11 @@ def test_delete_operation_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_delete_operation_field_headers_async(): @@ -16084,9 +17830,7 @@ async def test_delete_operation_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_operation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -16095,7 +17839,10 @@ async def test_delete_operation_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_delete_operation_from_dict(): @@ -16114,6 +17861,7 @@ def test_delete_operation_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_delete_operation_from_dict_async(): client = EventarcAsyncClient( @@ -16122,9 +17870,7 @@ async def test_delete_operation_from_dict_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.delete_operation( request={ "name": "locations", @@ -16148,6 +17894,7 @@ def test_delete_operation_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.DeleteOperationRequest() + @pytest.mark.asyncio async def test_delete_operation_flattened_async(): client = EventarcAsyncClient( @@ -16156,9 +17903,7 @@ async def test_delete_operation_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_operation() # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -16168,7 +17913,8 @@ async def test_delete_operation_flattened_async(): def test_cancel_operation(transport: str = "grpc"): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -16188,10 +17934,12 @@ def test_cancel_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. assert response is None + @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -16201,9 +17949,7 @@ async def test_cancel_operation_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -16226,7 +17972,7 @@ def test_cancel_operation_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None + call.return_value = None client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. @@ -16236,7 +17982,11 @@ def test_cancel_operation_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): @@ -16251,9 +18001,7 @@ async def test_cancel_operation_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -16262,7 +18010,10 @@ async def test_cancel_operation_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_cancel_operation_from_dict(): @@ -16281,6 +18032,7 @@ def test_cancel_operation_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = EventarcAsyncClient( @@ -16289,9 +18041,7 @@ async def test_cancel_operation_from_dict_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.cancel_operation( request={ "name": "locations", @@ -16315,6 +18065,7 @@ def test_cancel_operation_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.CancelOperationRequest() + @pytest.mark.asyncio async def test_cancel_operation_flattened_async(): client = EventarcAsyncClient( @@ -16323,9 +18074,7 @@ async def test_cancel_operation_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.cancel_operation() # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -16335,7 +18084,8 @@ async def test_cancel_operation_flattened_async(): def test_get_operation(transport: str = "grpc"): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -16355,10 +18105,12 @@ def test_get_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. 
assert isinstance(response, operations_pb2.Operation) + @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -16403,7 +18155,11 @@ def test_get_operation_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_get_operation_field_headers_async(): @@ -16429,7 +18185,10 @@ async def test_get_operation_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_get_operation_from_dict(): @@ -16448,6 +18207,7 @@ def test_get_operation_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = EventarcAsyncClient( @@ -16482,6 +18242,7 @@ def test_get_operation_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.GetOperationRequest() + @pytest.mark.asyncio async def test_get_operation_flattened_async(): client = EventarcAsyncClient( @@ -16502,7 +18263,8 @@ async def test_get_operation_flattened_async(): def test_list_operations(transport: str = "grpc"): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -16522,10 +18284,12 @@ def test_list_operations(transport: str = "grpc"): # Establish that the response 
is the type that we expect. assert isinstance(response, operations_pb2.ListOperationsResponse) + @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -16570,7 +18334,11 @@ def test_list_operations_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_list_operations_field_headers_async(): @@ -16596,7 +18364,10 @@ async def test_list_operations_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_list_operations_from_dict(): @@ -16615,6 +18386,7 @@ def test_list_operations_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = EventarcAsyncClient( @@ -16649,6 +18421,7 @@ def test_list_operations_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.ListOperationsRequest() + @pytest.mark.asyncio async def test_list_operations_flattened_async(): client = EventarcAsyncClient( @@ -16669,7 +18442,8 @@ async def test_list_operations_flattened_async(): def test_list_locations(transport: str = "grpc"): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -16689,10 +18463,12 @@ def 
test_list_locations(transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.ListLocationsResponse) + @pytest.mark.asyncio async def test_list_locations_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -16737,7 +18513,11 @@ def test_list_locations_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_list_locations_field_headers_async(): @@ -16763,7 +18543,10 @@ async def test_list_locations_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_list_locations_from_dict(): @@ -16782,6 +18565,7 @@ def test_list_locations_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_list_locations_from_dict_async(): client = EventarcAsyncClient( @@ -16816,6 +18600,7 @@ def test_list_locations_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == locations_pb2.ListLocationsRequest() + @pytest.mark.asyncio async def test_list_locations_flattened_async(): client = EventarcAsyncClient( @@ -16836,7 +18621,8 @@ async def test_list_locations_flattened_async(): def test_get_location(transport: str = "grpc"): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime 
is concerned, @@ -16856,10 +18642,12 @@ def test_get_location(transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.Location) + @pytest.mark.asyncio async def test_get_location_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -16883,8 +18671,7 @@ async def test_get_location_async(transport: str = "grpc_asyncio"): def test_get_location_field_headers(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials()) + client = EventarcClient(credentials=ga_credentials.AnonymousCredentials()) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -16903,13 +18690,15 @@ def test_get_location_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_get_location_field_headers_async(): - client = EventarcAsyncClient( - credentials=async_anonymous_credentials() - ) + client = EventarcAsyncClient(credentials=async_anonymous_credentials()) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -16929,7 +18718,10 @@ async def test_get_location_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] def test_get_location_from_dict(): @@ -16948,6 +18740,7 @@ def test_get_location_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_get_location_from_dict_async(): client = EventarcAsyncClient( @@ -16982,6 +18775,7 @@ def test_get_location_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == locations_pb2.GetLocationRequest() + @pytest.mark.asyncio async def test_get_location_flattened_async(): client = EventarcAsyncClient( @@ -17002,7 +18796,8 @@ async def test_get_location_flattened_async(): def test_set_iam_policy(transport: str = "grpc"): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -17012,7 +18807,10 @@ def test_set_iam_policy(transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) response = client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -17027,10 +18825,12 @@ def test_set_iam_policy(transport: str = "grpc"): assert response.etag == b"etag_blob" + @pytest.mark.asyncio async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -17042,7 +18842,10 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): # Designate an appropriate return value for the call. # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) ) response = await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. @@ -17082,7 +18885,11 @@ def test_set_iam_policy_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_set_iam_policy_field_headers_async(): @@ -17108,7 +18915,10 @@ async def test_set_iam_policy_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] def test_set_iam_policy_from_dict(): @@ -17137,9 +18947,7 @@ async def test_set_iam_policy_from_dict_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) response = await client.set_iam_policy( request={ @@ -17175,9 +18983,7 @@ async def test_set_iam_policy_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) await client.set_iam_policy() @@ -17186,9 +18992,11 @@ async def test_set_iam_policy_flattened_async(): _, args, _ = call.mock_calls[0] assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + def test_get_iam_policy(transport: str = "grpc"): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -17198,7 +19006,10 @@ def test_get_iam_policy(transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) response = client.get_iam_policy(request) @@ -17219,7 +19030,8 @@ def test_get_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -17227,12 +19039,13 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): request = iam_policy_pb2.GetIamPolicyRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) ) response = await client.get_iam_policy(request) @@ -17274,7 +19087,10 @@ def test_get_iam_policy_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -17289,9 +19105,7 @@ async def test_get_iam_policy_field_headers_async(): request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) await client.get_iam_policy(request) @@ -17303,7 +19117,10 @@ async def test_get_iam_policy_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] def test_get_iam_policy_from_dict(): @@ -17323,6 +19140,7 @@ def test_get_iam_policy_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_get_iam_policy_from_dict_async(): client = EventarcAsyncClient( @@ -17331,9 +19149,7 @@ async def test_get_iam_policy_from_dict_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) response = await client.get_iam_policy( request={ @@ -17369,9 +19185,7 @@ async def test_get_iam_policy_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) await client.get_iam_policy() @@ -17380,9 +19194,11 @@ async def test_get_iam_policy_flattened_async(): _, args, _ = call.mock_calls[0] assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + def test_test_iam_permissions(transport: str = "grpc"): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -17415,7 +19231,8 @@ def test_test_iam_permissions(transport: str = "grpc"): @pytest.mark.asyncio async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -17428,7 +19245,9 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) ) response = await client.test_iam_permissions(request) @@ -17470,7 +19289,10 @@ def test_test_iam_permissions_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -17501,7 +19323,10 @@ async def test_test_iam_permissions_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] def test_test_iam_permissions_from_dict(): @@ -17523,6 +19348,7 @@ def test_test_iam_permissions_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_test_iam_permissions_from_dict_async(): client = EventarcAsyncClient( @@ -17551,7 +19377,9 @@ def test_test_iam_permissions_flattened(): credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.test_iam_permissions), "__call__") as call: + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = iam_policy_pb2.TestIamPermissionsResponse() @@ -17569,7 +19397,9 @@ async def test_test_iam_permissions_flattened_async(): credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.test_iam_permissions), "__call__") as call: + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( iam_policy_pb2.TestIamPermissionsResponse() @@ -17585,10 +19415,11 @@ async def test_test_iam_permissions_flattened_async(): def test_transport_close_grpc(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: with client: close.assert_not_called() close.assert_called_once() @@ -17597,10 +19428,11 @@ def test_transport_close_grpc(): @pytest.mark.asyncio async def test_transport_close_grpc_asyncio(): client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" + credentials=async_anonymous_credentials(), transport="grpc_asyncio" ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: async with client: close.assert_not_called() close.assert_called_once() @@ -17608,10 +19440,11 @@ async def test_transport_close_grpc_asyncio(): def test_transport_close_rest(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: with client: close.assert_not_called() close.assert_called_once() @@ -17619,13 +19452,12 @@ def test_transport_close_rest(): def test_client_ctx(): transports = [ - 'rest', - 'grpc', + "rest", + "grpc", ] for transport in transports: client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - 
transport=transport + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. with mock.patch.object(type(client.transport), "close") as close: @@ -17634,10 +19466,14 @@ def test_client_ctx(): pass close.assert_called() -@pytest.mark.parametrize("client_class,transport_class", [ - (EventarcClient, transports.EventarcGrpcTransport), - (EventarcAsyncClient, transports.EventarcGrpcAsyncIOTransport), -]) + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (EventarcClient, transports.EventarcGrpcTransport), + (EventarcAsyncClient, transports.EventarcGrpcAsyncIOTransport), + ], +) def test_api_key_credentials(client_class, transport_class): with mock.patch.object( google.auth._default, "get_api_key_credentials", create=True @@ -17652,7 +19488,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py index 182a272e3182..8c65d450e2cd 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py @@ -28,7 +28,6 @@ import os import shlex import sys -import logging from typing import Any # If extensions (or modules to document with autodoc) are in another directory, @@ -83,9 +82,9 @@ root_doc = "index" # General information about the project. 
-project = u"google-cloud-logging" -copyright = u"2025, Google, LLC" -author = u"Google APIs" +project = "google-cloud-logging" +copyright = "2025, Google, LLC" +author = "Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -285,7 +284,7 @@ ( root_doc, "google-cloud-logging.tex", - u"google-cloud-logging Documentation", + "google-cloud-logging Documentation", author, "manual", ) @@ -386,6 +385,7 @@ napoleon_use_param = True napoleon_use_rtype = True + # Setup for sphinx behaviors such as warning filters. class UnexpectedUnindentFilter(logging.Filter): """Filter out warnings about unexpected unindentation following bullet lists.""" @@ -413,5 +413,5 @@ def setup(app: Any) -> None: """ # Sphinx's logger is hierarchical. Adding a filter to the # root 'sphinx' logger will catch warnings from all sub-loggers. - logger = logging.getLogger('sphinx') + logger = logging.getLogger("sphinx") logger.addFilter(UnexpectedUnindentFilter()) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py index 0e30784fd911..c94563aac93c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py @@ -18,170 +18,190 @@ __version__ = package_version.__version__ -from google.cloud.logging_v2.services.config_service_v2.client import ConfigServiceV2Client -from google.cloud.logging_v2.services.config_service_v2.async_client import ConfigServiceV2AsyncClient -from google.cloud.logging_v2.services.logging_service_v2.client import LoggingServiceV2Client -from google.cloud.logging_v2.services.logging_service_v2.async_client import LoggingServiceV2AsyncClient -from 
google.cloud.logging_v2.services.metrics_service_v2.client import MetricsServiceV2Client -from google.cloud.logging_v2.services.metrics_service_v2.async_client import MetricsServiceV2AsyncClient - -from google.cloud.logging_v2.types.log_entry import LogEntry -from google.cloud.logging_v2.types.log_entry import LogEntryOperation -from google.cloud.logging_v2.types.log_entry import LogEntrySourceLocation -from google.cloud.logging_v2.types.log_entry import LogSplit -from google.cloud.logging_v2.types.logging import DeleteLogRequest -from google.cloud.logging_v2.types.logging import ListLogEntriesRequest -from google.cloud.logging_v2.types.logging import ListLogEntriesResponse -from google.cloud.logging_v2.types.logging import ListLogsRequest -from google.cloud.logging_v2.types.logging import ListLogsResponse -from google.cloud.logging_v2.types.logging import ListMonitoredResourceDescriptorsRequest -from google.cloud.logging_v2.types.logging import ListMonitoredResourceDescriptorsResponse -from google.cloud.logging_v2.types.logging import TailLogEntriesRequest -from google.cloud.logging_v2.types.logging import TailLogEntriesResponse -from google.cloud.logging_v2.types.logging import WriteLogEntriesPartialErrors -from google.cloud.logging_v2.types.logging import WriteLogEntriesRequest -from google.cloud.logging_v2.types.logging import WriteLogEntriesResponse -from google.cloud.logging_v2.types.logging_config import BigQueryDataset -from google.cloud.logging_v2.types.logging_config import BigQueryOptions -from google.cloud.logging_v2.types.logging_config import BucketMetadata -from google.cloud.logging_v2.types.logging_config import CmekSettings -from google.cloud.logging_v2.types.logging_config import CopyLogEntriesMetadata -from google.cloud.logging_v2.types.logging_config import CopyLogEntriesRequest -from google.cloud.logging_v2.types.logging_config import CopyLogEntriesResponse -from google.cloud.logging_v2.types.logging_config import CreateBucketRequest -from 
google.cloud.logging_v2.types.logging_config import CreateExclusionRequest -from google.cloud.logging_v2.types.logging_config import CreateLinkRequest -from google.cloud.logging_v2.types.logging_config import CreateSinkRequest -from google.cloud.logging_v2.types.logging_config import CreateViewRequest -from google.cloud.logging_v2.types.logging_config import DeleteBucketRequest -from google.cloud.logging_v2.types.logging_config import DeleteExclusionRequest -from google.cloud.logging_v2.types.logging_config import DeleteLinkRequest -from google.cloud.logging_v2.types.logging_config import DeleteSinkRequest -from google.cloud.logging_v2.types.logging_config import DeleteViewRequest -from google.cloud.logging_v2.types.logging_config import GetBucketRequest -from google.cloud.logging_v2.types.logging_config import GetCmekSettingsRequest -from google.cloud.logging_v2.types.logging_config import GetExclusionRequest -from google.cloud.logging_v2.types.logging_config import GetLinkRequest -from google.cloud.logging_v2.types.logging_config import GetSettingsRequest -from google.cloud.logging_v2.types.logging_config import GetSinkRequest -from google.cloud.logging_v2.types.logging_config import GetViewRequest -from google.cloud.logging_v2.types.logging_config import IndexConfig -from google.cloud.logging_v2.types.logging_config import Link -from google.cloud.logging_v2.types.logging_config import LinkMetadata -from google.cloud.logging_v2.types.logging_config import ListBucketsRequest -from google.cloud.logging_v2.types.logging_config import ListBucketsResponse -from google.cloud.logging_v2.types.logging_config import ListExclusionsRequest -from google.cloud.logging_v2.types.logging_config import ListExclusionsResponse -from google.cloud.logging_v2.types.logging_config import ListLinksRequest -from google.cloud.logging_v2.types.logging_config import ListLinksResponse -from google.cloud.logging_v2.types.logging_config import ListSinksRequest -from 
google.cloud.logging_v2.types.logging_config import ListSinksResponse -from google.cloud.logging_v2.types.logging_config import ListViewsRequest -from google.cloud.logging_v2.types.logging_config import ListViewsResponse -from google.cloud.logging_v2.types.logging_config import LocationMetadata -from google.cloud.logging_v2.types.logging_config import LogBucket -from google.cloud.logging_v2.types.logging_config import LogExclusion -from google.cloud.logging_v2.types.logging_config import LogSink -from google.cloud.logging_v2.types.logging_config import LogView -from google.cloud.logging_v2.types.logging_config import Settings -from google.cloud.logging_v2.types.logging_config import UndeleteBucketRequest -from google.cloud.logging_v2.types.logging_config import UpdateBucketRequest -from google.cloud.logging_v2.types.logging_config import UpdateCmekSettingsRequest -from google.cloud.logging_v2.types.logging_config import UpdateExclusionRequest -from google.cloud.logging_v2.types.logging_config import UpdateSettingsRequest -from google.cloud.logging_v2.types.logging_config import UpdateSinkRequest -from google.cloud.logging_v2.types.logging_config import UpdateViewRequest -from google.cloud.logging_v2.types.logging_config import IndexType -from google.cloud.logging_v2.types.logging_config import LifecycleState -from google.cloud.logging_v2.types.logging_config import OperationState -from google.cloud.logging_v2.types.logging_metrics import CreateLogMetricRequest -from google.cloud.logging_v2.types.logging_metrics import DeleteLogMetricRequest -from google.cloud.logging_v2.types.logging_metrics import GetLogMetricRequest -from google.cloud.logging_v2.types.logging_metrics import ListLogMetricsRequest -from google.cloud.logging_v2.types.logging_metrics import ListLogMetricsResponse -from google.cloud.logging_v2.types.logging_metrics import LogMetric -from google.cloud.logging_v2.types.logging_metrics import UpdateLogMetricRequest +from 
google.cloud.logging_v2.services.config_service_v2.async_client import ( + ConfigServiceV2AsyncClient, +) +from google.cloud.logging_v2.services.config_service_v2.client import ( + ConfigServiceV2Client, +) +from google.cloud.logging_v2.services.logging_service_v2.async_client import ( + LoggingServiceV2AsyncClient, +) +from google.cloud.logging_v2.services.logging_service_v2.client import ( + LoggingServiceV2Client, +) +from google.cloud.logging_v2.services.metrics_service_v2.async_client import ( + MetricsServiceV2AsyncClient, +) +from google.cloud.logging_v2.services.metrics_service_v2.client import ( + MetricsServiceV2Client, +) +from google.cloud.logging_v2.types.log_entry import ( + LogEntry, + LogEntryOperation, + LogEntrySourceLocation, + LogSplit, +) +from google.cloud.logging_v2.types.logging import ( + DeleteLogRequest, + ListLogEntriesRequest, + ListLogEntriesResponse, + ListLogsRequest, + ListLogsResponse, + ListMonitoredResourceDescriptorsRequest, + ListMonitoredResourceDescriptorsResponse, + TailLogEntriesRequest, + TailLogEntriesResponse, + WriteLogEntriesPartialErrors, + WriteLogEntriesRequest, + WriteLogEntriesResponse, +) +from google.cloud.logging_v2.types.logging_config import ( + BigQueryDataset, + BigQueryOptions, + BucketMetadata, + CmekSettings, + CopyLogEntriesMetadata, + CopyLogEntriesRequest, + CopyLogEntriesResponse, + CreateBucketRequest, + CreateExclusionRequest, + CreateLinkRequest, + CreateSinkRequest, + CreateViewRequest, + DeleteBucketRequest, + DeleteExclusionRequest, + DeleteLinkRequest, + DeleteSinkRequest, + DeleteViewRequest, + GetBucketRequest, + GetCmekSettingsRequest, + GetExclusionRequest, + GetLinkRequest, + GetSettingsRequest, + GetSinkRequest, + GetViewRequest, + IndexConfig, + IndexType, + LifecycleState, + Link, + LinkMetadata, + ListBucketsRequest, + ListBucketsResponse, + ListExclusionsRequest, + ListExclusionsResponse, + ListLinksRequest, + ListLinksResponse, + ListSinksRequest, + ListSinksResponse, + 
ListViewsRequest, + ListViewsResponse, + LocationMetadata, + LogBucket, + LogExclusion, + LogSink, + LogView, + OperationState, + Settings, + UndeleteBucketRequest, + UpdateBucketRequest, + UpdateCmekSettingsRequest, + UpdateExclusionRequest, + UpdateSettingsRequest, + UpdateSinkRequest, + UpdateViewRequest, +) +from google.cloud.logging_v2.types.logging_metrics import ( + CreateLogMetricRequest, + DeleteLogMetricRequest, + GetLogMetricRequest, + ListLogMetricsRequest, + ListLogMetricsResponse, + LogMetric, + UpdateLogMetricRequest, +) -__all__ = ('ConfigServiceV2Client', - 'ConfigServiceV2AsyncClient', - 'LoggingServiceV2Client', - 'LoggingServiceV2AsyncClient', - 'MetricsServiceV2Client', - 'MetricsServiceV2AsyncClient', - 'LogEntry', - 'LogEntryOperation', - 'LogEntrySourceLocation', - 'LogSplit', - 'DeleteLogRequest', - 'ListLogEntriesRequest', - 'ListLogEntriesResponse', - 'ListLogsRequest', - 'ListLogsResponse', - 'ListMonitoredResourceDescriptorsRequest', - 'ListMonitoredResourceDescriptorsResponse', - 'TailLogEntriesRequest', - 'TailLogEntriesResponse', - 'WriteLogEntriesPartialErrors', - 'WriteLogEntriesRequest', - 'WriteLogEntriesResponse', - 'BigQueryDataset', - 'BigQueryOptions', - 'BucketMetadata', - 'CmekSettings', - 'CopyLogEntriesMetadata', - 'CopyLogEntriesRequest', - 'CopyLogEntriesResponse', - 'CreateBucketRequest', - 'CreateExclusionRequest', - 'CreateLinkRequest', - 'CreateSinkRequest', - 'CreateViewRequest', - 'DeleteBucketRequest', - 'DeleteExclusionRequest', - 'DeleteLinkRequest', - 'DeleteSinkRequest', - 'DeleteViewRequest', - 'GetBucketRequest', - 'GetCmekSettingsRequest', - 'GetExclusionRequest', - 'GetLinkRequest', - 'GetSettingsRequest', - 'GetSinkRequest', - 'GetViewRequest', - 'IndexConfig', - 'Link', - 'LinkMetadata', - 'ListBucketsRequest', - 'ListBucketsResponse', - 'ListExclusionsRequest', - 'ListExclusionsResponse', - 'ListLinksRequest', - 'ListLinksResponse', - 'ListSinksRequest', - 'ListSinksResponse', - 'ListViewsRequest', - 
'ListViewsResponse', - 'LocationMetadata', - 'LogBucket', - 'LogExclusion', - 'LogSink', - 'LogView', - 'Settings', - 'UndeleteBucketRequest', - 'UpdateBucketRequest', - 'UpdateCmekSettingsRequest', - 'UpdateExclusionRequest', - 'UpdateSettingsRequest', - 'UpdateSinkRequest', - 'UpdateViewRequest', - 'IndexType', - 'LifecycleState', - 'OperationState', - 'CreateLogMetricRequest', - 'DeleteLogMetricRequest', - 'GetLogMetricRequest', - 'ListLogMetricsRequest', - 'ListLogMetricsResponse', - 'LogMetric', - 'UpdateLogMetricRequest', +__all__ = ( + "ConfigServiceV2Client", + "ConfigServiceV2AsyncClient", + "LoggingServiceV2Client", + "LoggingServiceV2AsyncClient", + "MetricsServiceV2Client", + "MetricsServiceV2AsyncClient", + "LogEntry", + "LogEntryOperation", + "LogEntrySourceLocation", + "LogSplit", + "DeleteLogRequest", + "ListLogEntriesRequest", + "ListLogEntriesResponse", + "ListLogsRequest", + "ListLogsResponse", + "ListMonitoredResourceDescriptorsRequest", + "ListMonitoredResourceDescriptorsResponse", + "TailLogEntriesRequest", + "TailLogEntriesResponse", + "WriteLogEntriesPartialErrors", + "WriteLogEntriesRequest", + "WriteLogEntriesResponse", + "BigQueryDataset", + "BigQueryOptions", + "BucketMetadata", + "CmekSettings", + "CopyLogEntriesMetadata", + "CopyLogEntriesRequest", + "CopyLogEntriesResponse", + "CreateBucketRequest", + "CreateExclusionRequest", + "CreateLinkRequest", + "CreateSinkRequest", + "CreateViewRequest", + "DeleteBucketRequest", + "DeleteExclusionRequest", + "DeleteLinkRequest", + "DeleteSinkRequest", + "DeleteViewRequest", + "GetBucketRequest", + "GetCmekSettingsRequest", + "GetExclusionRequest", + "GetLinkRequest", + "GetSettingsRequest", + "GetSinkRequest", + "GetViewRequest", + "IndexConfig", + "Link", + "LinkMetadata", + "ListBucketsRequest", + "ListBucketsResponse", + "ListExclusionsRequest", + "ListExclusionsResponse", + "ListLinksRequest", + "ListLinksResponse", + "ListSinksRequest", + "ListSinksResponse", + "ListViewsRequest", + 
"ListViewsResponse", + "LocationMetadata", + "LogBucket", + "LogExclusion", + "LogSink", + "LogView", + "Settings", + "UndeleteBucketRequest", + "UpdateBucketRequest", + "UpdateCmekSettingsRequest", + "UpdateExclusionRequest", + "UpdateSettingsRequest", + "UpdateSinkRequest", + "UpdateViewRequest", + "IndexType", + "LifecycleState", + "OperationState", + "CreateLogMetricRequest", + "DeleteLogMetricRequest", + "GetLogMetricRequest", + "ListLogMetricsRequest", + "ListLogMetricsResponse", + "LogMetric", + "UpdateLogMetricRequest", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py index 242a43882502..b95e671e8d91 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.cloud.logging_v2 import gapic_version as package_version +import sys import google.api_core as api_core -import sys +from google.cloud.logging_v2 import gapic_version as package_version __version__ = package_version.__version__ @@ -28,117 +28,136 @@ import importlib_metadata as metadata -from .services.config_service_v2 import ConfigServiceV2Client -from .services.config_service_v2 import ConfigServiceV2AsyncClient -from .services.logging_service_v2 import LoggingServiceV2Client -from .services.logging_service_v2 import LoggingServiceV2AsyncClient -from .services.metrics_service_v2 import MetricsServiceV2Client -from .services.metrics_service_v2 import MetricsServiceV2AsyncClient - -from .types.log_entry import LogEntry -from .types.log_entry import LogEntryOperation -from .types.log_entry import LogEntrySourceLocation -from .types.log_entry import LogSplit -from .types.logging import DeleteLogRequest -from .types.logging import ListLogEntriesRequest -from .types.logging import ListLogEntriesResponse -from .types.logging import ListLogsRequest -from .types.logging import ListLogsResponse -from .types.logging import ListMonitoredResourceDescriptorsRequest -from .types.logging import ListMonitoredResourceDescriptorsResponse -from .types.logging import TailLogEntriesRequest -from .types.logging import TailLogEntriesResponse -from .types.logging import WriteLogEntriesPartialErrors -from .types.logging import WriteLogEntriesRequest -from .types.logging import WriteLogEntriesResponse -from .types.logging_config import BigQueryDataset -from .types.logging_config import BigQueryOptions -from .types.logging_config import BucketMetadata -from .types.logging_config import CmekSettings -from .types.logging_config import CopyLogEntriesMetadata -from .types.logging_config import CopyLogEntriesRequest -from .types.logging_config import CopyLogEntriesResponse -from .types.logging_config import CreateBucketRequest -from .types.logging_config import 
CreateExclusionRequest -from .types.logging_config import CreateLinkRequest -from .types.logging_config import CreateSinkRequest -from .types.logging_config import CreateViewRequest -from .types.logging_config import DeleteBucketRequest -from .types.logging_config import DeleteExclusionRequest -from .types.logging_config import DeleteLinkRequest -from .types.logging_config import DeleteSinkRequest -from .types.logging_config import DeleteViewRequest -from .types.logging_config import GetBucketRequest -from .types.logging_config import GetCmekSettingsRequest -from .types.logging_config import GetExclusionRequest -from .types.logging_config import GetLinkRequest -from .types.logging_config import GetSettingsRequest -from .types.logging_config import GetSinkRequest -from .types.logging_config import GetViewRequest -from .types.logging_config import IndexConfig -from .types.logging_config import Link -from .types.logging_config import LinkMetadata -from .types.logging_config import ListBucketsRequest -from .types.logging_config import ListBucketsResponse -from .types.logging_config import ListExclusionsRequest -from .types.logging_config import ListExclusionsResponse -from .types.logging_config import ListLinksRequest -from .types.logging_config import ListLinksResponse -from .types.logging_config import ListSinksRequest -from .types.logging_config import ListSinksResponse -from .types.logging_config import ListViewsRequest -from .types.logging_config import ListViewsResponse -from .types.logging_config import LocationMetadata -from .types.logging_config import LogBucket -from .types.logging_config import LogExclusion -from .types.logging_config import LogSink -from .types.logging_config import LogView -from .types.logging_config import Settings -from .types.logging_config import UndeleteBucketRequest -from .types.logging_config import UpdateBucketRequest -from .types.logging_config import UpdateCmekSettingsRequest -from .types.logging_config import 
UpdateExclusionRequest -from .types.logging_config import UpdateSettingsRequest -from .types.logging_config import UpdateSinkRequest -from .types.logging_config import UpdateViewRequest -from .types.logging_config import IndexType -from .types.logging_config import LifecycleState -from .types.logging_config import OperationState -from .types.logging_metrics import CreateLogMetricRequest -from .types.logging_metrics import DeleteLogMetricRequest -from .types.logging_metrics import GetLogMetricRequest -from .types.logging_metrics import ListLogMetricsRequest -from .types.logging_metrics import ListLogMetricsResponse -from .types.logging_metrics import LogMetric -from .types.logging_metrics import UpdateLogMetricRequest +from .services.config_service_v2 import ( + ConfigServiceV2AsyncClient, + ConfigServiceV2Client, +) +from .services.logging_service_v2 import ( + LoggingServiceV2AsyncClient, + LoggingServiceV2Client, +) +from .services.metrics_service_v2 import ( + MetricsServiceV2AsyncClient, + MetricsServiceV2Client, +) +from .types.log_entry import ( + LogEntry, + LogEntryOperation, + LogEntrySourceLocation, + LogSplit, +) +from .types.logging import ( + DeleteLogRequest, + ListLogEntriesRequest, + ListLogEntriesResponse, + ListLogsRequest, + ListLogsResponse, + ListMonitoredResourceDescriptorsRequest, + ListMonitoredResourceDescriptorsResponse, + TailLogEntriesRequest, + TailLogEntriesResponse, + WriteLogEntriesPartialErrors, + WriteLogEntriesRequest, + WriteLogEntriesResponse, +) +from .types.logging_config import ( + BigQueryDataset, + BigQueryOptions, + BucketMetadata, + CmekSettings, + CopyLogEntriesMetadata, + CopyLogEntriesRequest, + CopyLogEntriesResponse, + CreateBucketRequest, + CreateExclusionRequest, + CreateLinkRequest, + CreateSinkRequest, + CreateViewRequest, + DeleteBucketRequest, + DeleteExclusionRequest, + DeleteLinkRequest, + DeleteSinkRequest, + DeleteViewRequest, + GetBucketRequest, + GetCmekSettingsRequest, + GetExclusionRequest, + 
GetLinkRequest, + GetSettingsRequest, + GetSinkRequest, + GetViewRequest, + IndexConfig, + IndexType, + LifecycleState, + Link, + LinkMetadata, + ListBucketsRequest, + ListBucketsResponse, + ListExclusionsRequest, + ListExclusionsResponse, + ListLinksRequest, + ListLinksResponse, + ListSinksRequest, + ListSinksResponse, + ListViewsRequest, + ListViewsResponse, + LocationMetadata, + LogBucket, + LogExclusion, + LogSink, + LogView, + OperationState, + Settings, + UndeleteBucketRequest, + UpdateBucketRequest, + UpdateCmekSettingsRequest, + UpdateExclusionRequest, + UpdateSettingsRequest, + UpdateSinkRequest, + UpdateViewRequest, +) +from .types.logging_metrics import ( + CreateLogMetricRequest, + DeleteLogMetricRequest, + GetLogMetricRequest, + ListLogMetricsRequest, + ListLogMetricsResponse, + LogMetric, + UpdateLogMetricRequest, +) -if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER - api_core.check_python_version("google.cloud.logging_v2") # type: ignore - api_core.check_dependency_versions("google.cloud.logging_v2") # type: ignore -else: # pragma: NO COVER +if hasattr(api_core, "check_python_version") and hasattr( + api_core, "check_dependency_versions" +): # pragma: NO COVER + api_core.check_python_version("google.cloud.logging_v2") # type: ignore + api_core.check_dependency_versions("google.cloud.logging_v2") # type: ignore +else: # pragma: NO COVER # An older version of api_core is installed which does not define the # functions above. We do equivalent checks manually. try: - import warnings import sys + import warnings _py_version_str = sys.version.split()[0] _package_label = "google.cloud.logging_v2" if sys.version_info < (3, 9): - warnings.warn("You are using a non-supported Python version " + - f"({_py_version_str}). Google will not post any further " + - f"updates to {_package_label} supporting this Python version. 
" + - "Please upgrade to the latest Python version, or at " + - f"least to Python 3.9, and then update {_package_label}.", - FutureWarning) + warnings.warn( + "You are using a non-supported Python version " + + f"({_py_version_str}). Google will not post any further " + + f"updates to {_package_label} supporting this Python version. " + + "Please upgrade to the latest Python version, or at " + + f"least to Python 3.9, and then update {_package_label}.", + FutureWarning, + ) if sys.version_info[:2] == (3, 9): - warnings.warn(f"You are using a Python version ({_py_version_str}) " + - f"which Google will stop supporting in {_package_label} in " + - "January 2026. Please " + - "upgrade to the latest Python version, or at " + - "least to Python 3.10, before then, and " + - f"then update {_package_label}.", - FutureWarning) + warnings.warn( + f"You are using a Python version ({_py_version_str}) " + + f"which Google will stop supporting in {_package_label} in " + + "January 2026. Please " + + "upgrade to the latest Python version, or at " + + "least to Python 3.10, before then, and " + + f"then update {_package_label}.", + FutureWarning, + ) def parse_version_to_tuple(version_string: str): """Safely converts a semantic version string to a comparable tuple of integers. @@ -176,107 +195,111 @@ def _get_version(dependency_name): _recommendation = " (we recommend 6.x)" (_version_used, _version_used_string) = _get_version(_dependency_package) if _version_used and _version_used < _next_supported_version_tuple: - warnings.warn(f"Package {_package_label} depends on " + - f"{_dependency_package}, currently installed at version " + - f"{_version_used_string}. Future updates to " + - f"{_package_label} will require {_dependency_package} at " + - f"version {_next_supported_version} or higher{_recommendation}." 
+ - " Please ensure " + - "that either (a) your Python environment doesn't pin the " + - f"version of {_dependency_package}, so that updates to " + - f"{_package_label} can require the higher version, or " + - "(b) you manually update your Python environment to use at " + - f"least version {_next_supported_version} of " + - f"{_dependency_package}.", - FutureWarning) + warnings.warn( + f"Package {_package_label} depends on " + + f"{_dependency_package}, currently installed at version " + + f"{_version_used_string}. Future updates to " + + f"{_package_label} will require {_dependency_package} at " + + f"version {_next_supported_version} or higher{_recommendation}." + + " Please ensure " + + "that either (a) your Python environment doesn't pin the " + + f"version of {_dependency_package}, so that updates to " + + f"{_package_label} can require the higher version, or " + + "(b) you manually update your Python environment to use at " + + f"least version {_next_supported_version} of " + + f"{_dependency_package}.", + FutureWarning, + ) except Exception: - warnings.warn("Could not determine the version of Python " + - "currently being used. To continue receiving " + - "updates for {_package_label}, ensure you are " + - "using a supported version of Python; see " + - "https://devguide.python.org/versions/") + warnings.warn( + "Could not determine the version of Python " + + "currently being used. 
To continue receiving " + + "updates for {_package_label}, ensure you are " + + "using a supported version of Python; see " + + "https://devguide.python.org/versions/" + ) __all__ = ( - 'ConfigServiceV2AsyncClient', - 'LoggingServiceV2AsyncClient', - 'MetricsServiceV2AsyncClient', -'BigQueryDataset', -'BigQueryOptions', -'BucketMetadata', -'CmekSettings', -'ConfigServiceV2Client', -'CopyLogEntriesMetadata', -'CopyLogEntriesRequest', -'CopyLogEntriesResponse', -'CreateBucketRequest', -'CreateExclusionRequest', -'CreateLinkRequest', -'CreateLogMetricRequest', -'CreateSinkRequest', -'CreateViewRequest', -'DeleteBucketRequest', -'DeleteExclusionRequest', -'DeleteLinkRequest', -'DeleteLogMetricRequest', -'DeleteLogRequest', -'DeleteSinkRequest', -'DeleteViewRequest', -'GetBucketRequest', -'GetCmekSettingsRequest', -'GetExclusionRequest', -'GetLinkRequest', -'GetLogMetricRequest', -'GetSettingsRequest', -'GetSinkRequest', -'GetViewRequest', -'IndexConfig', -'IndexType', -'LifecycleState', -'Link', -'LinkMetadata', -'ListBucketsRequest', -'ListBucketsResponse', -'ListExclusionsRequest', -'ListExclusionsResponse', -'ListLinksRequest', -'ListLinksResponse', -'ListLogEntriesRequest', -'ListLogEntriesResponse', -'ListLogMetricsRequest', -'ListLogMetricsResponse', -'ListLogsRequest', -'ListLogsResponse', -'ListMonitoredResourceDescriptorsRequest', -'ListMonitoredResourceDescriptorsResponse', -'ListSinksRequest', -'ListSinksResponse', -'ListViewsRequest', -'ListViewsResponse', -'LocationMetadata', -'LogBucket', -'LogEntry', -'LogEntryOperation', -'LogEntrySourceLocation', -'LogExclusion', -'LogMetric', -'LogSink', -'LogSplit', -'LogView', -'LoggingServiceV2Client', -'MetricsServiceV2Client', -'OperationState', -'Settings', -'TailLogEntriesRequest', -'TailLogEntriesResponse', -'UndeleteBucketRequest', -'UpdateBucketRequest', -'UpdateCmekSettingsRequest', -'UpdateExclusionRequest', -'UpdateLogMetricRequest', -'UpdateSettingsRequest', -'UpdateSinkRequest', -'UpdateViewRequest', 
-'WriteLogEntriesPartialErrors', -'WriteLogEntriesRequest', -'WriteLogEntriesResponse', + "ConfigServiceV2AsyncClient", + "LoggingServiceV2AsyncClient", + "MetricsServiceV2AsyncClient", + "BigQueryDataset", + "BigQueryOptions", + "BucketMetadata", + "CmekSettings", + "ConfigServiceV2Client", + "CopyLogEntriesMetadata", + "CopyLogEntriesRequest", + "CopyLogEntriesResponse", + "CreateBucketRequest", + "CreateExclusionRequest", + "CreateLinkRequest", + "CreateLogMetricRequest", + "CreateSinkRequest", + "CreateViewRequest", + "DeleteBucketRequest", + "DeleteExclusionRequest", + "DeleteLinkRequest", + "DeleteLogMetricRequest", + "DeleteLogRequest", + "DeleteSinkRequest", + "DeleteViewRequest", + "GetBucketRequest", + "GetCmekSettingsRequest", + "GetExclusionRequest", + "GetLinkRequest", + "GetLogMetricRequest", + "GetSettingsRequest", + "GetSinkRequest", + "GetViewRequest", + "IndexConfig", + "IndexType", + "LifecycleState", + "Link", + "LinkMetadata", + "ListBucketsRequest", + "ListBucketsResponse", + "ListExclusionsRequest", + "ListExclusionsResponse", + "ListLinksRequest", + "ListLinksResponse", + "ListLogEntriesRequest", + "ListLogEntriesResponse", + "ListLogMetricsRequest", + "ListLogMetricsResponse", + "ListLogsRequest", + "ListLogsResponse", + "ListMonitoredResourceDescriptorsRequest", + "ListMonitoredResourceDescriptorsResponse", + "ListSinksRequest", + "ListSinksResponse", + "ListViewsRequest", + "ListViewsResponse", + "LocationMetadata", + "LogBucket", + "LogEntry", + "LogEntryOperation", + "LogEntrySourceLocation", + "LogExclusion", + "LogMetric", + "LogSink", + "LogSplit", + "LogView", + "LoggingServiceV2Client", + "MetricsServiceV2Client", + "OperationState", + "Settings", + "TailLogEntriesRequest", + "TailLogEntriesResponse", + "UndeleteBucketRequest", + "UpdateBucketRequest", + "UpdateCmekSettingsRequest", + "UpdateExclusionRequest", + "UpdateLogMetricRequest", + "UpdateSettingsRequest", + "UpdateSinkRequest", + "UpdateViewRequest", + 
"WriteLogEntriesPartialErrors", + "WriteLogEntriesRequest", + "WriteLogEntriesResponse", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py index 7c1b69fb603f..986f6a0c801c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from .client import ConfigServiceV2Client from .async_client import ConfigServiceV2AsyncClient +from .client import ConfigServiceV2Client __all__ = ( - 'ConfigServiceV2Client', - 'ConfigServiceV2AsyncClient', + "ConfigServiceV2Client", + "ConfigServiceV2AsyncClient", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index ae8f44596626..84bec95f3a47 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -14,46 +14,58 @@ # limitations under the License. 
# import logging as std_logging -from collections import OrderedDict import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.logging_v2 import gapic_version as package_version +from collections import OrderedDict +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) -from google.api_core.client_options import ClientOptions +import google.protobuf from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.logging_v2 import gapic_version as package_version +from google.oauth2 import service_account # type: ignore try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore -from google.cloud.logging_v2.services.config_service_v2 import pagers -from google.cloud.logging_v2.types import logging_config -from google.longrunning import operations_pb2 # type: ignore import google.api_core.operation as operation # type: ignore import google.api_core.operation_async as operation_async # type: ignore import google.protobuf.empty_pb2 as empty_pb2 # type: ignore import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore -from .transports.base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport +from 
google.cloud.logging_v2.services.config_service_v2 import pagers +from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore + from .client import ConfigServiceV2Client +from .transports.base import DEFAULT_CLIENT_INFO, ConfigServiceV2Transport +from .transports.grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) + class ConfigServiceV2AsyncClient: """Service for configuring sinks used to route log entries.""" @@ -67,29 +79,47 @@ class ConfigServiceV2AsyncClient: _DEFAULT_UNIVERSE = ConfigServiceV2Client._DEFAULT_UNIVERSE cmek_settings_path = staticmethod(ConfigServiceV2Client.cmek_settings_path) - parse_cmek_settings_path = staticmethod(ConfigServiceV2Client.parse_cmek_settings_path) + parse_cmek_settings_path = staticmethod( + ConfigServiceV2Client.parse_cmek_settings_path + ) link_path = staticmethod(ConfigServiceV2Client.link_path) parse_link_path = staticmethod(ConfigServiceV2Client.parse_link_path) log_bucket_path = staticmethod(ConfigServiceV2Client.log_bucket_path) parse_log_bucket_path = staticmethod(ConfigServiceV2Client.parse_log_bucket_path) log_exclusion_path = staticmethod(ConfigServiceV2Client.log_exclusion_path) - parse_log_exclusion_path = staticmethod(ConfigServiceV2Client.parse_log_exclusion_path) + parse_log_exclusion_path = staticmethod( + ConfigServiceV2Client.parse_log_exclusion_path + ) log_sink_path = staticmethod(ConfigServiceV2Client.log_sink_path) parse_log_sink_path = staticmethod(ConfigServiceV2Client.parse_log_sink_path) log_view_path = staticmethod(ConfigServiceV2Client.log_view_path) parse_log_view_path = staticmethod(ConfigServiceV2Client.parse_log_view_path) settings_path = staticmethod(ConfigServiceV2Client.settings_path) parse_settings_path 
= staticmethod(ConfigServiceV2Client.parse_settings_path) - common_billing_account_path = staticmethod(ConfigServiceV2Client.common_billing_account_path) - parse_common_billing_account_path = staticmethod(ConfigServiceV2Client.parse_common_billing_account_path) + common_billing_account_path = staticmethod( + ConfigServiceV2Client.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ConfigServiceV2Client.parse_common_billing_account_path + ) common_folder_path = staticmethod(ConfigServiceV2Client.common_folder_path) - parse_common_folder_path = staticmethod(ConfigServiceV2Client.parse_common_folder_path) - common_organization_path = staticmethod(ConfigServiceV2Client.common_organization_path) - parse_common_organization_path = staticmethod(ConfigServiceV2Client.parse_common_organization_path) + parse_common_folder_path = staticmethod( + ConfigServiceV2Client.parse_common_folder_path + ) + common_organization_path = staticmethod( + ConfigServiceV2Client.common_organization_path + ) + parse_common_organization_path = staticmethod( + ConfigServiceV2Client.parse_common_organization_path + ) common_project_path = staticmethod(ConfigServiceV2Client.common_project_path) - parse_common_project_path = staticmethod(ConfigServiceV2Client.parse_common_project_path) + parse_common_project_path = staticmethod( + ConfigServiceV2Client.parse_common_project_path + ) common_location_path = staticmethod(ConfigServiceV2Client.common_location_path) - parse_common_location_path = staticmethod(ConfigServiceV2Client.parse_common_location_path) + parse_common_location_path = staticmethod( + ConfigServiceV2Client.parse_common_location_path + ) @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): @@ -131,7 +161,9 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = 
None): + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): """Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -194,12 +226,18 @@ def universe_domain(self) -> str: get_transport_class = ConfigServiceV2Client.get_transport_class - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ConfigServiceV2Transport, Callable[..., ConfigServiceV2Transport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, ConfigServiceV2Transport, Callable[..., ConfigServiceV2Transport] + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the config service v2 async client. 
Args: @@ -254,31 +292,39 @@ def __init__(self, *, transport=transport, client_options=client_options, client_info=client_info, - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER _LOGGER.debug( "Created client `google.logging_v2.ConfigServiceV2AsyncClient`.", - extra = { + extra={ "serviceName": "google.logging.v2.ConfigServiceV2", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { "serviceName": "google.logging.v2.ConfigServiceV2", "credentialsType": None, - } + }, ) - async def list_buckets(self, - request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListBucketsAsyncPager: + async def list_buckets( + self, + request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListBucketsAsyncPager: r"""Lists log buckets. .. 
code-block:: python @@ -350,10 +396,14 @@ async def sample_list_buckets(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -367,14 +417,14 @@ async def sample_list_buckets(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_buckets] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_buckets + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -402,13 +452,14 @@ async def sample_list_buckets(): # Done; return the response. 
return response - async def get_bucket(self, - request: Optional[Union[logging_config.GetBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogBucket: + async def get_bucket( + self, + request: Optional[Union[logging_config.GetBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogBucket: r"""Gets a log bucket. .. code-block:: python @@ -462,14 +513,14 @@ async def sample_get_bucket(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_bucket] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_bucket + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -486,13 +537,14 @@ async def sample_get_bucket(): # Done; return the response. 
return response - async def create_bucket_async(self, - request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def create_bucket_async( + self, + request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Creates a log bucket asynchronously that can be used to store log entries. After a bucket has been created, the bucket's location @@ -557,14 +609,14 @@ async def sample_create_bucket_async(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_bucket_async] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_bucket_async + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -589,13 +641,14 @@ async def sample_create_bucket_async(): # Done; return the response. 
return response - async def update_bucket_async(self, - request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def update_bucket_async( + self, + request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Updates a log bucket asynchronously. If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, @@ -662,14 +715,14 @@ async def sample_update_bucket_async(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_bucket_async] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_bucket_async + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -694,13 +747,14 @@ async def sample_update_bucket_async(): # Done; return the response. 
return response - async def create_bucket(self, - request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogBucket: + async def create_bucket( + self, + request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogBucket: r"""Creates a log bucket that can be used to store log entries. After a bucket has been created, the bucket's location cannot be changed. @@ -757,14 +811,14 @@ async def sample_create_bucket(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_bucket] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_bucket + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -781,13 +835,14 @@ async def sample_create_bucket(): # Done; return the response. 
return response - async def update_bucket(self, - request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogBucket: + async def update_bucket( + self, + request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogBucket: r"""Updates a log bucket. If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, @@ -847,14 +902,14 @@ async def sample_update_bucket(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_bucket] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_bucket + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -871,13 +926,14 @@ async def sample_update_bucket(): # Done; return the response. 
return response - async def delete_bucket(self, - request: Optional[Union[logging_config.DeleteBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def delete_bucket( + self, + request: Optional[Union[logging_config.DeleteBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a log bucket. Changes the bucket's ``lifecycle_state`` to the @@ -927,14 +983,14 @@ async def sample_delete_bucket(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_bucket] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_bucket + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. 
@@ -948,13 +1004,14 @@ async def sample_delete_bucket(): metadata=metadata, ) - async def undelete_bucket(self, - request: Optional[Union[logging_config.UndeleteBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def undelete_bucket( + self, + request: Optional[Union[logging_config.UndeleteBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Undeletes a log bucket. A bucket that has been deleted can be undeleted within the grace period of 7 days. @@ -1001,14 +1058,14 @@ async def sample_undelete_bucket(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.undelete_bucket] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.undelete_bucket + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. 
@@ -1022,14 +1079,15 @@ async def sample_undelete_bucket(): metadata=metadata, ) - async def list_views(self, - request: Optional[Union[logging_config.ListViewsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListViewsAsyncPager: + async def list_views( + self, + request: Optional[Union[logging_config.ListViewsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListViewsAsyncPager: r"""Lists views on a log bucket. .. code-block:: python @@ -1093,10 +1151,14 @@ async def sample_list_views(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1110,14 +1172,14 @@ async def sample_list_views(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.list_views] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_views + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1145,13 +1207,14 @@ async def sample_list_views(): # Done; return the response. return response - async def get_view(self, - request: Optional[Union[logging_config.GetViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogView: + async def get_view( + self, + request: Optional[Union[logging_config.GetViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogView: r"""Gets a view on a log bucket.. .. code-block:: python @@ -1210,9 +1273,7 @@ async def sample_get_view(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1229,13 +1290,14 @@ async def sample_get_view(): # Done; return the response. 
return response - async def create_view(self, - request: Optional[Union[logging_config.CreateViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogView: + async def create_view( + self, + request: Optional[Union[logging_config.CreateViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogView: r"""Creates a view over log entries in a log bucket. A bucket may contain a maximum of 30 views. @@ -1291,14 +1353,14 @@ async def sample_create_view(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_view] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_view + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1315,13 +1377,14 @@ async def sample_create_view(): # Done; return the response. 
return response - async def update_view(self, - request: Optional[Union[logging_config.UpdateViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogView: + async def update_view( + self, + request: Optional[Union[logging_config.UpdateViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogView: r"""Updates a view on a log bucket. This method replaces the following fields in the existing view with values from the new view: ``filter``. If an ``UNAVAILABLE`` error is returned, this @@ -1379,14 +1442,14 @@ async def sample_update_view(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_view] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_view + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1403,13 +1466,14 @@ async def sample_update_view(): # Done; return the response. 
return response - async def delete_view(self, - request: Optional[Union[logging_config.DeleteViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def delete_view( + self, + request: Optional[Union[logging_config.DeleteViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is returned, this indicates that system is not in a state where it can delete the view. If this occurs, please try again in a few @@ -1457,14 +1521,14 @@ async def sample_delete_view(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_view] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_view + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. 
@@ -1478,14 +1542,15 @@ async def sample_delete_view(): metadata=metadata, ) - async def list_sinks(self, - request: Optional[Union[logging_config.ListSinksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListSinksAsyncPager: + async def list_sinks( + self, + request: Optional[Union[logging_config.ListSinksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSinksAsyncPager: r"""Lists sinks. .. code-block:: python @@ -1552,10 +1617,14 @@ async def sample_list_sinks(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1569,14 +1638,14 @@ async def sample_list_sinks(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.list_sinks] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_sinks + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1604,14 +1673,15 @@ async def sample_list_sinks(): # Done; return the response. return response - async def get_sink(self, - request: Optional[Union[logging_config.GetSinkRequest, dict]] = None, - *, - sink_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogSink: + async def get_sink( + self, + request: Optional[Union[logging_config.GetSinkRequest, dict]] = None, + *, + sink_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogSink: r"""Gets a sink. .. code-block:: python @@ -1685,10 +1755,14 @@ async def sample_get_sink(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [sink_name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1707,9 +1781,9 @@ async def sample_get_sink(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("sink_name", request.sink_name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("sink_name", request.sink_name),) + ), ) # Validate the universe domain. @@ -1726,15 +1800,16 @@ async def sample_get_sink(): # Done; return the response. return response - async def create_sink(self, - request: Optional[Union[logging_config.CreateSinkRequest, dict]] = None, - *, - parent: Optional[str] = None, - sink: Optional[logging_config.LogSink] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogSink: + async def create_sink( + self, + request: Optional[Union[logging_config.CreateSinkRequest, dict]] = None, + *, + parent: Optional[str] = None, + sink: Optional[logging_config.LogSink] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogSink: r"""Creates a sink that exports specified log entries to a destination. The export of newly-ingested log entries begins immediately, unless the sink's ``writer_identity`` is not @@ -1824,10 +1899,14 @@ async def sample_create_sink(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [parent, sink] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1843,14 +1922,14 @@ async def sample_create_sink(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_sink] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_sink + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1867,16 +1946,17 @@ async def sample_create_sink(): # Done; return the response. 
return response - async def update_sink(self, - request: Optional[Union[logging_config.UpdateSinkRequest, dict]] = None, - *, - sink_name: Optional[str] = None, - sink: Optional[logging_config.LogSink] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogSink: + async def update_sink( + self, + request: Optional[Union[logging_config.UpdateSinkRequest, dict]] = None, + *, + sink_name: Optional[str] = None, + sink: Optional[logging_config.LogSink] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogSink: r"""Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: ``destination``, and ``filter``. @@ -1990,10 +2070,14 @@ async def sample_update_sink(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [sink_name, sink, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -2011,14 +2095,16 @@ async def sample_update_sink(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_sink] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_sink + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("sink_name", request.sink_name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("sink_name", request.sink_name),) + ), ) # Validate the universe domain. @@ -2035,14 +2121,15 @@ async def sample_update_sink(): # Done; return the response. return response - async def delete_sink(self, - request: Optional[Union[logging_config.DeleteSinkRequest, dict]] = None, - *, - sink_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def delete_sink( + self, + request: Optional[Union[logging_config.DeleteSinkRequest, dict]] = None, + *, + sink_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a sink. If the sink has a unique ``writer_identity``, then that service account is also deleted. @@ -2102,10 +2189,14 @@ async def sample_delete_sink(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [sink_name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2119,14 +2210,16 @@ async def sample_delete_sink(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_sink] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_sink + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("sink_name", request.sink_name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("sink_name", request.sink_name),) + ), ) # Validate the universe domain. 
@@ -2140,16 +2233,17 @@ async def sample_delete_sink(): metadata=metadata, ) - async def create_link(self, - request: Optional[Union[logging_config.CreateLinkRequest, dict]] = None, - *, - parent: Optional[str] = None, - link: Optional[logging_config.Link] = None, - link_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def create_link( + self, + request: Optional[Union[logging_config.CreateLinkRequest, dict]] = None, + *, + parent: Optional[str] = None, + link: Optional[logging_config.Link] = None, + link_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Asynchronously creates a linked dataset in BigQuery which makes it possible to use BigQuery to read the logs stored in the log bucket. A log bucket may currently @@ -2237,10 +2331,14 @@ async def sample_create_link(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent, link, link_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -2258,14 +2356,14 @@ async def sample_create_link(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_link] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_link + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2290,14 +2388,15 @@ async def sample_create_link(): # Done; return the response. return response - async def delete_link(self, - request: Optional[Union[logging_config.DeleteLinkRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def delete_link( + self, + request: Optional[Union[logging_config.DeleteLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Deletes a link. This will also delete the corresponding BigQuery linked dataset. @@ -2373,10 +2472,14 @@ async def sample_delete_link(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2390,14 +2493,14 @@ async def sample_delete_link(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_link] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_link + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2422,14 +2525,15 @@ async def sample_delete_link(): # Done; return the response. 
return response - async def list_links(self, - request: Optional[Union[logging_config.ListLinksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLinksAsyncPager: + async def list_links( + self, + request: Optional[Union[logging_config.ListLinksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLinksAsyncPager: r"""Lists links. .. code-block:: python @@ -2495,10 +2599,14 @@ async def sample_list_links(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2512,14 +2620,14 @@ async def sample_list_links(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_links] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_links + ] # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2547,14 +2655,15 @@ async def sample_list_links(): # Done; return the response. return response - async def get_link(self, - request: Optional[Union[logging_config.GetLinkRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.Link: + async def get_link( + self, + request: Optional[Union[logging_config.GetLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Link: r"""Gets a link. .. code-block:: python @@ -2615,10 +2724,14 @@ async def sample_get_link(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2637,9 +2750,7 @@ async def sample_get_link(): # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2656,14 +2767,15 @@ async def sample_get_link(): # Done; return the response. return response - async def list_exclusions(self, - request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListExclusionsAsyncPager: + async def list_exclusions( + self, + request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListExclusionsAsyncPager: r"""Lists all the exclusions on the \_Default sink in a parent resource. @@ -2731,10 +2843,14 @@ async def sample_list_exclusions(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -2748,14 +2864,14 @@ async def sample_list_exclusions(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_exclusions] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_exclusions + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2783,14 +2899,15 @@ async def sample_list_exclusions(): # Done; return the response. return response - async def get_exclusion(self, - request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogExclusion: + async def get_exclusion( + self, + request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogExclusion: r"""Gets the description of an exclusion in the \_Default sink. .. code-block:: python @@ -2862,10 +2979,14 @@ async def sample_get_exclusion(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2879,14 +3000,14 @@ async def sample_get_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_exclusion] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_exclusion + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2903,15 +3024,16 @@ async def sample_get_exclusion(): # Done; return the response. 
return response - async def create_exclusion(self, - request: Optional[Union[logging_config.CreateExclusionRequest, dict]] = None, - *, - parent: Optional[str] = None, - exclusion: Optional[logging_config.LogExclusion] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogExclusion: + async def create_exclusion( + self, + request: Optional[Union[logging_config.CreateExclusionRequest, dict]] = None, + *, + parent: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogExclusion: r"""Creates a new exclusion in the \_Default sink in a specified parent resource. Only log entries belonging to that resource can be excluded. You can have up to 10 exclusions in a resource. @@ -3000,10 +3122,14 @@ async def sample_create_exclusion(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent, exclusion] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -3019,14 +3145,14 @@ async def sample_create_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_exclusion] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_exclusion + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -3043,16 +3169,17 @@ async def sample_create_exclusion(): # Done; return the response. return response - async def update_exclusion(self, - request: Optional[Union[logging_config.UpdateExclusionRequest, dict]] = None, - *, - name: Optional[str] = None, - exclusion: Optional[logging_config.LogExclusion] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogExclusion: + async def update_exclusion( + self, + request: Optional[Union[logging_config.UpdateExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogExclusion: r"""Changes one or more properties of an existing exclusion in the \_Default sink. @@ -3152,10 +3279,14 @@ async def sample_update_exclusion(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [name, exclusion, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3173,14 +3304,14 @@ async def sample_update_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_exclusion] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_exclusion + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3197,14 +3328,15 @@ async def sample_update_exclusion(): # Done; return the response. 
return response - async def delete_exclusion(self, - request: Optional[Union[logging_config.DeleteExclusionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def delete_exclusion( + self, + request: Optional[Union[logging_config.DeleteExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes an exclusion in the \_Default sink. .. code-block:: python @@ -3263,10 +3395,14 @@ async def sample_delete_exclusion(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3280,14 +3416,14 @@ async def sample_delete_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.delete_exclusion] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_exclusion + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3301,13 +3437,14 @@ async def sample_delete_exclusion(): metadata=metadata, ) - async def get_cmek_settings(self, - request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.CmekSettings: + async def get_cmek_settings( + self, + request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.CmekSettings: r"""Gets the Logging CMEK settings for the given resource. Note: CMEK for the Log Router can be configured for Google Cloud @@ -3385,14 +3522,14 @@ async def sample_get_cmek_settings(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_cmek_settings] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_cmek_settings + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. 
@@ -3409,13 +3546,14 @@ async def sample_get_cmek_settings(): # Done; return the response. return response - async def update_cmek_settings(self, - request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.CmekSettings: + async def update_cmek_settings( + self, + request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.CmekSettings: r"""Updates the Log Router CMEK settings for the given resource. Note: CMEK for the Log Router can currently only be configured @@ -3498,14 +3636,14 @@ async def sample_update_cmek_settings(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_cmek_settings] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_cmek_settings + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3522,14 +3660,15 @@ async def sample_update_cmek_settings(): # Done; return the response. 
return response - async def get_settings(self, - request: Optional[Union[logging_config.GetSettingsRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.Settings: + async def get_settings( + self, + request: Optional[Union[logging_config.GetSettingsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Settings: r"""Gets the Log Router settings for the given resource. Note: Settings for the Log Router can be get for Google Cloud @@ -3619,10 +3758,14 @@ async def sample_get_settings(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3636,14 +3779,14 @@ async def sample_get_settings(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.get_settings] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_settings + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3660,15 +3803,16 @@ async def sample_get_settings(): # Done; return the response. return response - async def update_settings(self, - request: Optional[Union[logging_config.UpdateSettingsRequest, dict]] = None, - *, - settings: Optional[logging_config.Settings] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.Settings: + async def update_settings( + self, + request: Optional[Union[logging_config.UpdateSettingsRequest, dict]] = None, + *, + settings: Optional[logging_config.Settings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Settings: r"""Updates the Log Router settings for the given resource. Note: Settings for the Log Router can currently only be @@ -3765,10 +3909,14 @@ async def sample_update_settings(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [settings, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3784,14 +3932,14 @@ async def sample_update_settings(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_settings] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_settings + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3808,13 +3956,14 @@ async def sample_update_settings(): # Done; return the response. 
return response - async def copy_log_entries(self, - request: Optional[Union[logging_config.CopyLogEntriesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def copy_log_entries( + self, + request: Optional[Union[logging_config.CopyLogEntriesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Copies a set of log entries from a log bucket to a Cloud Storage bucket. @@ -3878,7 +4027,9 @@ async def sample_copy_log_entries(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.copy_log_entries] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.copy_log_entries + ] # Validate the universe domain. self._client._validate_universe_domain() @@ -3944,8 +4095,7 @@ async def list_operations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -3953,7 +4103,11 @@ async def list_operations( # Send the request. response = await rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -4000,8 +4154,7 @@ async def get_operation( # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -4009,7 +4162,11 @@ async def get_operation( # Send the request. response = await rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -4059,15 +4216,19 @@ async def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - await rpc(request_pb, retry=retry, timeout=timeout, metadata=metadata,) + await rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) async def __aenter__(self) -> "ConfigServiceV2AsyncClient": return self @@ -4075,12 +4236,13 @@ async def __aenter__(self) -> "ConfigServiceV2AsyncClient": async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "ConfigServiceV2AsyncClient", -) +__all__ = ("ConfigServiceV2AsyncClient",) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index c3e16949aad2..3e7261b32744 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -13,27 +13,38 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from collections import OrderedDict -from http import HTTPStatus import json import logging as std_logging import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast import warnings +from collections import OrderedDict +from http import HTTPStatus +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) -from google.cloud.logging_v2 import gapic_version as package_version - +import google.protobuf from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.logging_v2 import gapic_version as 
package_version +from google.oauth2 import service_account # type: ignore try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -42,21 +53,23 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) -from google.cloud.logging_v2.services.config_service_v2 import pagers -from google.cloud.logging_v2.types import logging_config -from google.longrunning import operations_pb2 # type: ignore import google.api_core.operation as operation # type: ignore import google.api_core.operation_async as operation_async # type: ignore import google.protobuf.empty_pb2 as empty_pb2 # type: ignore import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore -from .transports.base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO +from google.cloud.logging_v2.services.config_service_v2 import pagers +from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore + +from .transports.base import DEFAULT_CLIENT_INFO, ConfigServiceV2Transport from .transports.grpc import ConfigServiceV2GrpcTransport from .transports.grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport @@ -68,13 +81,15 @@ class ConfigServiceV2ClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. 
""" + _transport_registry = OrderedDict() # type: Dict[str, Type[ConfigServiceV2Transport]] _transport_registry["grpc"] = ConfigServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = ConfigServiceV2GrpcAsyncIOTransport - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[ConfigServiceV2Transport]: + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[ConfigServiceV2Transport]: """Returns an appropriate transport class. Args: @@ -150,14 +165,16 @@ def _use_client_cert_effective(): bool: whether client certificate should be used for mTLS Raises: ValueError: (If using a version of google-auth without should_use_client_cert and - GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) """ # check if google-auth version supports should_use_client_cert for automatic mTLS enablement if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER return mtls.should_use_client_cert() - else: # pragma: NO COVER + else: # pragma: NO COVER # if unsupported, fallback to reading from env var - use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert_str = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() if use_client_cert_str not in ("true", "false"): raise ValueError( "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" @@ -196,8 +213,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: ConfigServiceV2Client: The constructed client. 
""" - credentials = service_account.Credentials.from_service_account_file( - filename) + credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials return cls(*args, **kwargs) @@ -214,139 +230,220 @@ def transport(self) -> ConfigServiceV2Transport: return self._transport @staticmethod - def cmek_settings_path(project: str,) -> str: + def cmek_settings_path( + project: str, + ) -> str: """Returns a fully-qualified cmek_settings string.""" - return "projects/{project}/cmekSettings".format(project=project, ) + return "projects/{project}/cmekSettings".format( + project=project, + ) @staticmethod - def parse_cmek_settings_path(path: str) -> Dict[str,str]: + def parse_cmek_settings_path(path: str) -> Dict[str, str]: """Parses a cmek_settings path into its component segments.""" m = re.match(r"^projects/(?P.+?)/cmekSettings$", path) return m.groupdict() if m else {} @staticmethod - def link_path(project: str,location: str,bucket: str,link: str,) -> str: + def link_path( + project: str, + location: str, + bucket: str, + link: str, + ) -> str: """Returns a fully-qualified link string.""" - return "projects/{project}/locations/{location}/buckets/{bucket}/links/{link}".format(project=project, location=location, bucket=bucket, link=link, ) + return "projects/{project}/locations/{location}/buckets/{bucket}/links/{link}".format( + project=project, + location=location, + bucket=bucket, + link=link, + ) @staticmethod - def parse_link_path(path: str) -> Dict[str,str]: + def parse_link_path(path: str) -> Dict[str, str]: """Parses a link path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)/links/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)/links/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def log_bucket_path(project: str,location: str,bucket: str,) -> str: + def log_bucket_path( + project: str, + location: 
str, + bucket: str, + ) -> str: """Returns a fully-qualified log_bucket string.""" - return "projects/{project}/locations/{location}/buckets/{bucket}".format(project=project, location=location, bucket=bucket, ) + return "projects/{project}/locations/{location}/buckets/{bucket}".format( + project=project, + location=location, + bucket=bucket, + ) @staticmethod - def parse_log_bucket_path(path: str) -> Dict[str,str]: + def parse_log_bucket_path(path: str) -> Dict[str, str]: """Parses a log_bucket path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def log_exclusion_path(project: str,exclusion: str,) -> str: + def log_exclusion_path( + project: str, + exclusion: str, + ) -> str: """Returns a fully-qualified log_exclusion string.""" - return "projects/{project}/exclusions/{exclusion}".format(project=project, exclusion=exclusion, ) + return "projects/{project}/exclusions/{exclusion}".format( + project=project, + exclusion=exclusion, + ) @staticmethod - def parse_log_exclusion_path(path: str) -> Dict[str,str]: + def parse_log_exclusion_path(path: str) -> Dict[str, str]: """Parses a log_exclusion path into its component segments.""" m = re.match(r"^projects/(?P.+?)/exclusions/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def log_sink_path(project: str,sink: str,) -> str: + def log_sink_path( + project: str, + sink: str, + ) -> str: """Returns a fully-qualified log_sink string.""" - return "projects/{project}/sinks/{sink}".format(project=project, sink=sink, ) + return "projects/{project}/sinks/{sink}".format( + project=project, + sink=sink, + ) @staticmethod - def parse_log_sink_path(path: str) -> Dict[str,str]: + def parse_log_sink_path(path: str) -> Dict[str, str]: """Parses a log_sink path into its component segments.""" m = 
re.match(r"^projects/(?P.+?)/sinks/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def log_view_path(project: str,location: str,bucket: str,view: str,) -> str: + def log_view_path( + project: str, + location: str, + bucket: str, + view: str, + ) -> str: """Returns a fully-qualified log_view string.""" - return "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format(project=project, location=location, bucket=bucket, view=view, ) + return "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format( + project=project, + location=location, + bucket=bucket, + view=view, + ) @staticmethod - def parse_log_view_path(path: str) -> Dict[str,str]: + def parse_log_view_path(path: str) -> Dict[str, str]: """Parses a log_view path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)/views/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)/views/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def settings_path(project: str,) -> str: + def settings_path( + project: str, + ) -> str: """Returns a fully-qualified settings string.""" - return "projects/{project}/settings".format(project=project, ) + return "projects/{project}/settings".format( + project=project, + ) @staticmethod - def parse_settings_path(path: str) -> Dict[str,str]: + def parse_settings_path(path: str) -> Dict[str, str]: """Parses a settings path into its component segments.""" m = re.match(r"^projects/(?P.+?)/settings$", path) return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) 
@staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: + def parse_common_billing_account_path(path: str) -> Dict[str, str]: """Parse a billing_account path into its component segments.""" m = re.match(r"^billingAccounts/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str, ) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: + def parse_common_folder_path(path: str) -> Dict[str, str]: """Parse a folder path into its component segments.""" m = re.match(r"^folders/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str, ) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: + def parse_common_organization_path(path: str) -> Dict[str, str]: """Parse a organization path into its component segments.""" m = re.match(r"^organizations/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str, ) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) + return "projects/{project}".format( + project=project, + ) @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: + def parse_common_project_path(path: str) -> Dict[str, str]: """Parse a project path into its component segments.""" m = re.match(r"^projects/(?P.+?)$", path) return 
m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str, ) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: + def parse_common_location_path(path: str) -> Dict[str, str]: """Parse a location path into its component segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -378,14 +475,18 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = ConfigServiceV2Client._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Figure out the client cert source to use. client_cert_source = None @@ -398,7 +499,9 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio # Figure out which api endpoint to use. if client_options.api_endpoint is not None: api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): api_endpoint = cls.DEFAULT_MTLS_ENDPOINT else: api_endpoint = cls.DEFAULT_ENDPOINT @@ -423,7 +526,9 @@ def _read_environment_variables(): use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) return use_client_cert, use_mtls_endpoint, universe_domain_env @staticmethod @@ -446,7 +551,9 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): return client_cert_source @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint) -> str: + def _get_api_endpoint( + 
api_override, client_cert_source, universe_domain, use_mtls_endpoint + ) -> str: """Return the API endpoint used by the client. Args: @@ -462,17 +569,25 @@ def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtl """ if api_override is not None: api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): _default_universe = ConfigServiceV2Client._DEFAULT_UNIVERSE if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) api_endpoint = ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + api_endpoint = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) return api_endpoint @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: """Return the universe domain used by the client. Args: @@ -508,15 +623,18 @@ def _validate_universe_domain(self): return True def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError + self, error: core_exceptions.GoogleAPICallError ) -> None: """Adds credential info string to error details for 401/403/404 errors. Args: error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
""" - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: return cred = self._transport._credentials @@ -549,12 +667,18 @@ def universe_domain(self) -> str: """ return self._universe_domain - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ConfigServiceV2Transport, Callable[..., ConfigServiceV2Transport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, ConfigServiceV2Transport, Callable[..., ConfigServiceV2Transport] + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the config service v2 client. 
Args: @@ -609,13 +733,21 @@ def __init__(self, *, self._client_options = client_options_lib.from_dict(self._client_options) if self._client_options is None: self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ConfigServiceV2Client._read_environment_variables() - self._client_cert_source = ConfigServiceV2Client._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = ConfigServiceV2Client._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ( + ConfigServiceV2Client._read_environment_variables() + ) + self._client_cert_source = ConfigServiceV2Client._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = ConfigServiceV2Client._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) self._api_endpoint: str = "" # updated below, depending on `transport` # Initialize the universe domain validation. @@ -627,7 +759,9 @@ def __init__(self, *, api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport @@ -636,30 +770,40 @@ def __init__(self, *, if transport_provided: # transport is a ConfigServiceV2Transport instance. 
if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) if self._client_options.scopes: raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." + "When providing a transport instance, provide its scopes directly." ) self._transport = cast(ConfigServiceV2Transport, transport) self._api_endpoint = self._transport.host - self._api_endpoint = (self._api_endpoint or - ConfigServiceV2Client._get_api_endpoint( + self._api_endpoint = ( + self._api_endpoint + or ConfigServiceV2Client._get_api_endpoint( self._client_options.api_endpoint, self._client_cert_source, self._universe_domain, - self._use_mtls_endpoint)) + self._use_mtls_endpoint, + ) + ) if not transport_provided: import google.auth._default # type: ignore - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) - transport_init: Union[Type[ConfigServiceV2Transport], Callable[..., ConfigServiceV2Transport]] = ( + transport_init: Union[ + Type[ConfigServiceV2Transport], Callable[..., ConfigServiceV2Transport] + ] = ( ConfigServiceV2Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConfigServiceV2Transport], transport) @@ -678,28 +822,37 @@ def __init__(self, *, ) if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER _LOGGER.debug( "Created client 
`google.logging_v2.ConfigServiceV2Client`.", - extra = { + extra={ "serviceName": "google.logging.v2.ConfigServiceV2", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { "serviceName": "google.logging.v2.ConfigServiceV2", "credentialsType": None, - } + }, ) - def list_buckets(self, - request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListBucketsPager: + def list_buckets( + self, + request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListBucketsPager: r"""Lists log buckets. .. code-block:: python @@ -771,10 +924,14 @@ def sample_list_buckets(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -792,9 +949,7 @@ def sample_list_buckets(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -822,13 +977,14 @@ def sample_list_buckets(): # Done; return the response. return response - def get_bucket(self, - request: Optional[Union[logging_config.GetBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogBucket: + def get_bucket( + self, + request: Optional[Union[logging_config.GetBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogBucket: r"""Gets a log bucket. .. code-block:: python @@ -887,9 +1043,7 @@ def sample_get_bucket(): # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -906,13 +1060,14 @@ def sample_get_bucket(): # Done; return the response. return response - def create_bucket_async(self, - request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def create_bucket_async( + self, + request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Creates a log bucket asynchronously that can be used to store log entries. After a bucket has been created, the bucket's location @@ -982,9 +1137,7 @@ def sample_create_bucket_async(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1009,13 +1162,14 @@ def sample_create_bucket_async(): # Done; return the response. 
return response - def update_bucket_async(self, - request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def update_bucket_async( + self, + request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Updates a log bucket asynchronously. If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, @@ -1087,9 +1241,7 @@ def sample_update_bucket_async(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1114,13 +1266,14 @@ def sample_update_bucket_async(): # Done; return the response. return response - def create_bucket(self, - request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogBucket: + def create_bucket( + self, + request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogBucket: r"""Creates a log bucket that can be used to store log entries. After a bucket has been created, the bucket's location cannot be changed. 
@@ -1182,9 +1335,7 @@ def sample_create_bucket(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1201,13 +1352,14 @@ def sample_create_bucket(): # Done; return the response. return response - def update_bucket(self, - request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogBucket: + def update_bucket( + self, + request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogBucket: r"""Updates a log bucket. If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, @@ -1272,9 +1424,7 @@ def sample_update_bucket(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1291,13 +1441,14 @@ def sample_update_bucket(): # Done; return the response. 
return response - def delete_bucket(self, - request: Optional[Union[logging_config.DeleteBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def delete_bucket( + self, + request: Optional[Union[logging_config.DeleteBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a log bucket. Changes the bucket's ``lifecycle_state`` to the @@ -1352,9 +1503,7 @@ def sample_delete_bucket(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1368,13 +1517,14 @@ def sample_delete_bucket(): metadata=metadata, ) - def undelete_bucket(self, - request: Optional[Union[logging_config.UndeleteBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def undelete_bucket( + self, + request: Optional[Union[logging_config.UndeleteBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Undeletes a log bucket. A bucket that has been deleted can be undeleted within the grace period of 7 days. @@ -1426,9 +1576,7 @@ def sample_undelete_bucket(): # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1442,14 +1590,15 @@ def sample_undelete_bucket(): metadata=metadata, ) - def list_views(self, - request: Optional[Union[logging_config.ListViewsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListViewsPager: + def list_views( + self, + request: Optional[Union[logging_config.ListViewsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListViewsPager: r"""Lists views on a log bucket. .. code-block:: python @@ -1513,10 +1662,14 @@ def sample_list_views(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1534,9 +1687,7 @@ def sample_list_views(): # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1564,13 +1715,14 @@ def sample_list_views(): # Done; return the response. return response - def get_view(self, - request: Optional[Union[logging_config.GetViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogView: + def get_view( + self, + request: Optional[Union[logging_config.GetViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogView: r"""Gets a view on a log bucket.. .. code-block:: python @@ -1629,9 +1781,7 @@ def sample_get_view(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1648,13 +1798,14 @@ def sample_get_view(): # Done; return the response. 
return response - def create_view(self, - request: Optional[Union[logging_config.CreateViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogView: + def create_view( + self, + request: Optional[Union[logging_config.CreateViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogView: r"""Creates a view over log entries in a log bucket. A bucket may contain a maximum of 30 views. @@ -1715,9 +1866,7 @@ def sample_create_view(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1734,13 +1883,14 @@ def sample_create_view(): # Done; return the response. return response - def update_view(self, - request: Optional[Union[logging_config.UpdateViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogView: + def update_view( + self, + request: Optional[Union[logging_config.UpdateViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogView: r"""Updates a view on a log bucket. This method replaces the following fields in the existing view with values from the new view: ``filter``. 
If an ``UNAVAILABLE`` error is returned, this @@ -1803,9 +1953,7 @@ def sample_update_view(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1822,13 +1970,14 @@ def sample_update_view(): # Done; return the response. return response - def delete_view(self, - request: Optional[Union[logging_config.DeleteViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def delete_view( + self, + request: Optional[Union[logging_config.DeleteViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is returned, this indicates that system is not in a state where it can delete the view. If this occurs, please try again in a few @@ -1881,9 +2030,7 @@ def sample_delete_view(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. 
@@ -1897,14 +2044,15 @@ def sample_delete_view(): metadata=metadata, ) - def list_sinks(self, - request: Optional[Union[logging_config.ListSinksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListSinksPager: + def list_sinks( + self, + request: Optional[Union[logging_config.ListSinksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSinksPager: r"""Lists sinks. .. code-block:: python @@ -1971,10 +2119,14 @@ def sample_list_sinks(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1992,9 +2144,7 @@ def sample_list_sinks(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. 
@@ -2022,14 +2172,15 @@ def sample_list_sinks(): # Done; return the response. return response - def get_sink(self, - request: Optional[Union[logging_config.GetSinkRequest, dict]] = None, - *, - sink_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogSink: + def get_sink( + self, + request: Optional[Union[logging_config.GetSinkRequest, dict]] = None, + *, + sink_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogSink: r"""Gets a sink. .. code-block:: python @@ -2103,10 +2254,14 @@ def sample_get_sink(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [sink_name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2124,9 +2279,9 @@ def sample_get_sink(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("sink_name", request.sink_name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("sink_name", request.sink_name),) + ), ) # Validate the universe domain. 
@@ -2143,15 +2298,16 @@ def sample_get_sink(): # Done; return the response. return response - def create_sink(self, - request: Optional[Union[logging_config.CreateSinkRequest, dict]] = None, - *, - parent: Optional[str] = None, - sink: Optional[logging_config.LogSink] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogSink: + def create_sink( + self, + request: Optional[Union[logging_config.CreateSinkRequest, dict]] = None, + *, + parent: Optional[str] = None, + sink: Optional[logging_config.LogSink] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogSink: r"""Creates a sink that exports specified log entries to a destination. The export of newly-ingested log entries begins immediately, unless the sink's ``writer_identity`` is not @@ -2241,10 +2397,14 @@ def sample_create_sink(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent, sink] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2264,9 +2424,7 @@ def sample_create_sink(): # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2283,16 +2441,17 @@ def sample_create_sink(): # Done; return the response. return response - def update_sink(self, - request: Optional[Union[logging_config.UpdateSinkRequest, dict]] = None, - *, - sink_name: Optional[str] = None, - sink: Optional[logging_config.LogSink] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogSink: + def update_sink( + self, + request: Optional[Union[logging_config.UpdateSinkRequest, dict]] = None, + *, + sink_name: Optional[str] = None, + sink: Optional[logging_config.LogSink] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogSink: r"""Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: ``destination``, and ``filter``. @@ -2406,10 +2565,14 @@ def sample_update_sink(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [sink_name, sink, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2431,9 +2594,9 @@ def sample_update_sink(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("sink_name", request.sink_name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("sink_name", request.sink_name),) + ), ) # Validate the universe domain. @@ -2450,14 +2613,15 @@ def sample_update_sink(): # Done; return the response. return response - def delete_sink(self, - request: Optional[Union[logging_config.DeleteSinkRequest, dict]] = None, - *, - sink_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def delete_sink( + self, + request: Optional[Union[logging_config.DeleteSinkRequest, dict]] = None, + *, + sink_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a sink. If the sink has a unique ``writer_identity``, then that service account is also deleted. 
@@ -2517,10 +2681,14 @@ def sample_delete_sink(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [sink_name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2538,9 +2706,9 @@ def sample_delete_sink(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("sink_name", request.sink_name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("sink_name", request.sink_name),) + ), ) # Validate the universe domain. 
@@ -2554,16 +2722,17 @@ def sample_delete_sink(): metadata=metadata, ) - def create_link(self, - request: Optional[Union[logging_config.CreateLinkRequest, dict]] = None, - *, - parent: Optional[str] = None, - link: Optional[logging_config.Link] = None, - link_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def create_link( + self, + request: Optional[Union[logging_config.CreateLinkRequest, dict]] = None, + *, + parent: Optional[str] = None, + link: Optional[logging_config.Link] = None, + link_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Asynchronously creates a linked dataset in BigQuery which makes it possible to use BigQuery to read the logs stored in the log bucket. A log bucket may currently @@ -2651,10 +2820,14 @@ def sample_create_link(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent, link, link_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -2676,9 +2849,7 @@ def sample_create_link(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2703,14 +2874,15 @@ def sample_create_link(): # Done; return the response. return response - def delete_link(self, - request: Optional[Union[logging_config.DeleteLinkRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def delete_link( + self, + request: Optional[Union[logging_config.DeleteLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Deletes a link. This will also delete the corresponding BigQuery linked dataset. @@ -2786,10 +2958,14 @@ def sample_delete_link(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2807,9 +2983,7 @@ def sample_delete_link(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2834,14 +3008,15 @@ def sample_delete_link(): # Done; return the response. return response - def list_links(self, - request: Optional[Union[logging_config.ListLinksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLinksPager: + def list_links( + self, + request: Optional[Union[logging_config.ListLinksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLinksPager: r"""Lists links. .. code-block:: python @@ -2907,10 +3082,14 @@ def sample_list_links(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2928,9 +3107,7 @@ def sample_list_links(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2958,14 +3135,15 @@ def sample_list_links(): # Done; return the response. return response - def get_link(self, - request: Optional[Union[logging_config.GetLinkRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.Link: + def get_link( + self, + request: Optional[Union[logging_config.GetLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Link: r"""Gets a link. .. code-block:: python @@ -3026,10 +3204,14 @@ def sample_get_link(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3047,9 +3229,7 @@ def sample_get_link(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3066,14 +3246,15 @@ def sample_get_link(): # Done; return the response. return response - def list_exclusions(self, - request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListExclusionsPager: + def list_exclusions( + self, + request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListExclusionsPager: r"""Lists all the exclusions on the \_Default sink in a parent resource. @@ -3141,10 +3322,14 @@ def sample_list_exclusions(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3162,9 +3347,7 @@ def sample_list_exclusions(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -3192,14 +3375,15 @@ def sample_list_exclusions(): # Done; return the response. return response - def get_exclusion(self, - request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogExclusion: + def get_exclusion( + self, + request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogExclusion: r"""Gets the description of an exclusion in the \_Default sink. .. 
code-block:: python @@ -3271,10 +3455,14 @@ def sample_get_exclusion(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3292,9 +3480,7 @@ def sample_get_exclusion(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3311,15 +3497,16 @@ def sample_get_exclusion(): # Done; return the response. 
return response - def create_exclusion(self, - request: Optional[Union[logging_config.CreateExclusionRequest, dict]] = None, - *, - parent: Optional[str] = None, - exclusion: Optional[logging_config.LogExclusion] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogExclusion: + def create_exclusion( + self, + request: Optional[Union[logging_config.CreateExclusionRequest, dict]] = None, + *, + parent: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogExclusion: r"""Creates a new exclusion in the \_Default sink in a specified parent resource. Only log entries belonging to that resource can be excluded. You can have up to 10 exclusions in a resource. @@ -3408,10 +3595,14 @@ def sample_create_exclusion(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent, exclusion] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3431,9 +3622,7 @@ def sample_create_exclusion(): # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -3450,16 +3639,17 @@ def sample_create_exclusion(): # Done; return the response. return response - def update_exclusion(self, - request: Optional[Union[logging_config.UpdateExclusionRequest, dict]] = None, - *, - name: Optional[str] = None, - exclusion: Optional[logging_config.LogExclusion] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogExclusion: + def update_exclusion( + self, + request: Optional[Union[logging_config.UpdateExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogExclusion: r"""Changes one or more properties of an existing exclusion in the \_Default sink. @@ -3559,10 +3749,14 @@ def sample_update_exclusion(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [name, exclusion, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3584,9 +3778,7 @@ def sample_update_exclusion(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3603,14 +3795,15 @@ def sample_update_exclusion(): # Done; return the response. return response - def delete_exclusion(self, - request: Optional[Union[logging_config.DeleteExclusionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def delete_exclusion( + self, + request: Optional[Union[logging_config.DeleteExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes an exclusion in the \_Default sink. .. 
code-block:: python @@ -3669,10 +3862,14 @@ def sample_delete_exclusion(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3690,9 +3887,7 @@ def sample_delete_exclusion(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3706,13 +3901,14 @@ def sample_delete_exclusion(): metadata=metadata, ) - def get_cmek_settings(self, - request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.CmekSettings: + def get_cmek_settings( + self, + request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.CmekSettings: r"""Gets the Logging CMEK settings for the given resource. 
Note: CMEK for the Log Router can be configured for Google Cloud @@ -3795,9 +3991,7 @@ def sample_get_cmek_settings(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3814,13 +4008,14 @@ def sample_get_cmek_settings(): # Done; return the response. return response - def update_cmek_settings(self, - request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.CmekSettings: + def update_cmek_settings( + self, + request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.CmekSettings: r"""Updates the Log Router CMEK settings for the given resource. Note: CMEK for the Log Router can currently only be configured @@ -3908,9 +4103,7 @@ def sample_update_cmek_settings(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3927,14 +4120,15 @@ def sample_update_cmek_settings(): # Done; return the response. 
return response - def get_settings(self, - request: Optional[Union[logging_config.GetSettingsRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.Settings: + def get_settings( + self, + request: Optional[Union[logging_config.GetSettingsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Settings: r"""Gets the Log Router settings for the given resource. Note: Settings for the Log Router can be get for Google Cloud @@ -4024,10 +4218,14 @@ def sample_get_settings(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -4045,9 +4243,7 @@ def sample_get_settings(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. 
@@ -4064,15 +4260,16 @@ def sample_get_settings(): # Done; return the response. return response - def update_settings(self, - request: Optional[Union[logging_config.UpdateSettingsRequest, dict]] = None, - *, - settings: Optional[logging_config.Settings] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.Settings: + def update_settings( + self, + request: Optional[Union[logging_config.UpdateSettingsRequest, dict]] = None, + *, + settings: Optional[logging_config.Settings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Settings: r"""Updates the Log Router settings for the given resource. Note: Settings for the Log Router can currently only be @@ -4169,10 +4366,14 @@ def sample_update_settings(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [settings, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -4192,9 +4393,7 @@ def sample_update_settings(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -4211,13 +4410,14 @@ def sample_update_settings(): # Done; return the response. return response - def copy_log_entries(self, - request: Optional[Union[logging_config.CopyLogEntriesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def copy_log_entries( + self, + request: Optional[Union[logging_config.CopyLogEntriesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Copies a set of log entries from a log bucket to a Cloud Storage bucket. @@ -4360,8 +4560,7 @@ def list_operations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -4370,7 +4569,11 @@ def list_operations( try: # Send the request. response = rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -4420,8 +4623,7 @@ def get_operation( # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -4430,7 +4632,11 @@ def get_operation( try: # Send the request. response = rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -4483,27 +4689,26 @@ def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc(request_pb, retry=retry, timeout=timeout, metadata=metadata,) - - - - - + rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "ConfigServiceV2Client", -) +__all__ = ("ConfigServiceV2Client",) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py index 1af6b54c9924..2083d0423914 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -13,13 +13,27 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + from google.api_core import gapic_v1 from google.api_core import retry as retries from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore @@ -44,14 +58,17 @@ class ListBucketsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., logging_config.ListBucketsResponse], - request: logging_config.ListBucketsRequest, - response: logging_config.ListBucketsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging_config.ListBucketsResponse], + request: logging_config.ListBucketsRequest, + response: logging_config.ListBucketsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. Args: @@ -84,7 +101,12 @@ def pages(self) -> Iterator[logging_config.ListBucketsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[logging_config.LogBucket]: @@ -92,7 +114,7 @@ def __iter__(self) -> Iterator[logging_config.LogBucket]: yield from page.buckets def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListBucketsAsyncPager: @@ -112,14 +134,17 @@ class ListBucketsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[logging_config.ListBucketsResponse]], - request: logging_config.ListBucketsRequest, - response: logging_config.ListBucketsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging_config.ListBucketsResponse]], + request: logging_config.ListBucketsRequest, + response: logging_config.ListBucketsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. Args: @@ -152,8 +177,14 @@ async def pages(self) -> AsyncIterator[logging_config.ListBucketsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[logging_config.LogBucket]: async def async_generator(): async for page in self.pages: @@ -163,7 +194,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListViewsPager: @@ -183,14 +214,17 @@ class ListViewsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., logging_config.ListViewsResponse], - request: logging_config.ListViewsRequest, - response: logging_config.ListViewsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging_config.ListViewsResponse], + request: logging_config.ListViewsRequest, + response: logging_config.ListViewsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. Args: @@ -223,7 +257,12 @@ def pages(self) -> Iterator[logging_config.ListViewsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[logging_config.LogView]: @@ -231,7 +270,7 @@ def __iter__(self) -> Iterator[logging_config.LogView]: yield from page.views def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListViewsAsyncPager: @@ -251,14 +290,17 @@ class ListViewsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[logging_config.ListViewsResponse]], - request: logging_config.ListViewsRequest, - response: logging_config.ListViewsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging_config.ListViewsResponse]], + request: logging_config.ListViewsRequest, + response: logging_config.ListViewsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. Args: @@ -291,8 +333,14 @@ async def pages(self) -> AsyncIterator[logging_config.ListViewsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[logging_config.LogView]: async def async_generator(): async for page in self.pages: @@ -302,7 +350,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListSinksPager: @@ -322,14 +370,17 @@ class ListSinksPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., logging_config.ListSinksResponse], - request: logging_config.ListSinksRequest, - response: logging_config.ListSinksResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging_config.ListSinksResponse], + request: logging_config.ListSinksRequest, + response: logging_config.ListSinksResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. Args: @@ -362,7 +413,12 @@ def pages(self) -> Iterator[logging_config.ListSinksResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[logging_config.LogSink]: @@ -370,7 +426,7 @@ def __iter__(self) -> Iterator[logging_config.LogSink]: yield from page.sinks def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListSinksAsyncPager: @@ -390,14 +446,17 @@ class ListSinksAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[logging_config.ListSinksResponse]], - request: logging_config.ListSinksRequest, - response: logging_config.ListSinksResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging_config.ListSinksResponse]], + request: logging_config.ListSinksRequest, + response: logging_config.ListSinksResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. Args: @@ -430,8 +489,14 @@ async def pages(self) -> AsyncIterator[logging_config.ListSinksResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[logging_config.LogSink]: async def async_generator(): async for page in self.pages: @@ -441,7 +506,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListLinksPager: @@ -461,14 +526,17 @@ class ListLinksPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., logging_config.ListLinksResponse], - request: logging_config.ListLinksRequest, - response: logging_config.ListLinksResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging_config.ListLinksResponse], + request: logging_config.ListLinksRequest, + response: logging_config.ListLinksResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. Args: @@ -501,7 +569,12 @@ def pages(self) -> Iterator[logging_config.ListLinksResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[logging_config.Link]: @@ -509,7 +582,7 @@ def __iter__(self) -> Iterator[logging_config.Link]: yield from page.links def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListLinksAsyncPager: @@ -529,14 +602,17 @@ class ListLinksAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[logging_config.ListLinksResponse]], - request: logging_config.ListLinksRequest, - response: logging_config.ListLinksResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging_config.ListLinksResponse]], + request: logging_config.ListLinksRequest, + response: logging_config.ListLinksResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. Args: @@ -569,8 +645,14 @@ async def pages(self) -> AsyncIterator[logging_config.ListLinksResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[logging_config.Link]: async def async_generator(): async for page in self.pages: @@ -580,7 +662,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListExclusionsPager: @@ -600,14 +682,17 @@ class ListExclusionsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., logging_config.ListExclusionsResponse], - request: logging_config.ListExclusionsRequest, - response: logging_config.ListExclusionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging_config.ListExclusionsResponse], + request: logging_config.ListExclusionsRequest, + response: logging_config.ListExclusionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. Args: @@ -640,7 +725,12 @@ def pages(self) -> Iterator[logging_config.ListExclusionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[logging_config.LogExclusion]: @@ -648,7 +738,7 @@ def __iter__(self) -> Iterator[logging_config.LogExclusion]: yield from page.exclusions def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListExclusionsAsyncPager: @@ -668,14 +758,17 @@ class ListExclusionsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[logging_config.ListExclusionsResponse]], - request: logging_config.ListExclusionsRequest, - response: logging_config.ListExclusionsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging_config.ListExclusionsResponse]], + request: logging_config.ListExclusionsRequest, + response: logging_config.ListExclusionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. Args: @@ -708,8 +801,14 @@ async def pages(self) -> AsyncIterator[logging_config.ListExclusionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[logging_config.LogExclusion]: async def async_generator(): async for page in self.pages: @@ -719,4 +818,4 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py index cc3da21c119f..790c53bfdcdd 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py @@ -20,14 +20,13 @@ from .grpc import ConfigServiceV2GrpcTransport from .grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport - # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[ConfigServiceV2Transport]] -_transport_registry['grpc'] = ConfigServiceV2GrpcTransport -_transport_registry['grpc_asyncio'] = ConfigServiceV2GrpcAsyncIOTransport +_transport_registry["grpc"] = ConfigServiceV2GrpcTransport +_transport_registry["grpc_asyncio"] = ConfigServiceV2GrpcAsyncIOTransport __all__ = ( - 'ConfigServiceV2Transport', - 'ConfigServiceV2GrpcTransport', - 'ConfigServiceV2GrpcAsyncIOTransport', + "ConfigServiceV2Transport", + "ConfigServiceV2GrpcTransport", + "ConfigServiceV2GrpcAsyncIOTransport", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index b3f67b0461bc..b8222d13fc5f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -16,23 +16,22 @@ import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -from google.cloud.logging_v2 import gapic_version as package_version - -import google.auth # type: ignore import google.api_core +import google.auth # type: ignore +import google.protobuf +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore from google.api_core import exceptions as 
core_exceptions -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, operations_v1 from google.api_core import retry as retries -from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - +from google.cloud.logging_v2 import gapic_version as package_version from google.cloud.logging_v2.types import logging_config -from google.longrunning import operations_pb2 # type: ignore -import google.protobuf.empty_pb2 as empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ @@ -42,26 +41,27 @@ class ConfigServiceV2Transport(abc.ABC): """Abstract transport class for ConfigServiceV2.""" AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", ) - DEFAULT_HOST: str = 'logging.googleapis.com' + DEFAULT_HOST: str = "logging.googleapis.com" def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: 
gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: """Instantiate the transport. Args: @@ -100,31 +100,43 @@ def __init__( # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) if credentials_file is not None: credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - default_scopes=self.AUTH_SCOPES, - ) + credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(scopes=scopes, quota_project_id=quota_project_id, default_scopes=self.AUTH_SCOPES) + credentials, _ = google.auth.default( + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) # Don't apply audience if the credentials file passed from user. 
if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): credentials = credentials.with_always_use_jwt_access(True) # Save the credentials. self._credentials = credentials # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' + if ":" not in host: + host += ":443" self._host = host self._wrapped_methods: Dict[Callable, Callable] = {} @@ -388,14 +400,14 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - } + } def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
""" raise NotImplementedError() @@ -405,291 +417,306 @@ def operations_client(self): raise NotImplementedError() @property - def list_buckets(self) -> Callable[ - [logging_config.ListBucketsRequest], - Union[ - logging_config.ListBucketsResponse, - Awaitable[logging_config.ListBucketsResponse] - ]]: + def list_buckets( + self, + ) -> Callable[ + [logging_config.ListBucketsRequest], + Union[ + logging_config.ListBucketsResponse, + Awaitable[logging_config.ListBucketsResponse], + ], + ]: raise NotImplementedError() @property - def get_bucket(self) -> Callable[ - [logging_config.GetBucketRequest], - Union[ - logging_config.LogBucket, - Awaitable[logging_config.LogBucket] - ]]: + def get_bucket( + self, + ) -> Callable[ + [logging_config.GetBucketRequest], + Union[logging_config.LogBucket, Awaitable[logging_config.LogBucket]], + ]: raise NotImplementedError() @property - def create_bucket_async(self) -> Callable[ - [logging_config.CreateBucketRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def create_bucket_async( + self, + ) -> Callable[ + [logging_config.CreateBucketRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def update_bucket_async(self) -> Callable[ - [logging_config.UpdateBucketRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def update_bucket_async( + self, + ) -> Callable[ + [logging_config.UpdateBucketRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def create_bucket(self) -> Callable[ - [logging_config.CreateBucketRequest], - Union[ - logging_config.LogBucket, - Awaitable[logging_config.LogBucket] - ]]: + def create_bucket( + self, + ) -> Callable[ + [logging_config.CreateBucketRequest], + Union[logging_config.LogBucket, Awaitable[logging_config.LogBucket]], + ]: raise NotImplementedError() @property - def 
update_bucket(self) -> Callable[ - [logging_config.UpdateBucketRequest], - Union[ - logging_config.LogBucket, - Awaitable[logging_config.LogBucket] - ]]: + def update_bucket( + self, + ) -> Callable[ + [logging_config.UpdateBucketRequest], + Union[logging_config.LogBucket, Awaitable[logging_config.LogBucket]], + ]: raise NotImplementedError() @property - def delete_bucket(self) -> Callable[ - [logging_config.DeleteBucketRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def delete_bucket( + self, + ) -> Callable[ + [logging_config.DeleteBucketRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: raise NotImplementedError() @property - def undelete_bucket(self) -> Callable[ - [logging_config.UndeleteBucketRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def undelete_bucket( + self, + ) -> Callable[ + [logging_config.UndeleteBucketRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: raise NotImplementedError() @property - def list_views(self) -> Callable[ - [logging_config.ListViewsRequest], - Union[ - logging_config.ListViewsResponse, - Awaitable[logging_config.ListViewsResponse] - ]]: + def list_views( + self, + ) -> Callable[ + [logging_config.ListViewsRequest], + Union[ + logging_config.ListViewsResponse, + Awaitable[logging_config.ListViewsResponse], + ], + ]: raise NotImplementedError() @property - def get_view(self) -> Callable[ - [logging_config.GetViewRequest], - Union[ - logging_config.LogView, - Awaitable[logging_config.LogView] - ]]: + def get_view( + self, + ) -> Callable[ + [logging_config.GetViewRequest], + Union[logging_config.LogView, Awaitable[logging_config.LogView]], + ]: raise NotImplementedError() @property - def create_view(self) -> Callable[ - [logging_config.CreateViewRequest], - Union[ - logging_config.LogView, - Awaitable[logging_config.LogView] - ]]: + def create_view( + self, + ) -> Callable[ + [logging_config.CreateViewRequest], + 
Union[logging_config.LogView, Awaitable[logging_config.LogView]], + ]: raise NotImplementedError() @property - def update_view(self) -> Callable[ - [logging_config.UpdateViewRequest], - Union[ - logging_config.LogView, - Awaitable[logging_config.LogView] - ]]: + def update_view( + self, + ) -> Callable[ + [logging_config.UpdateViewRequest], + Union[logging_config.LogView, Awaitable[logging_config.LogView]], + ]: raise NotImplementedError() @property - def delete_view(self) -> Callable[ - [logging_config.DeleteViewRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def delete_view( + self, + ) -> Callable[ + [logging_config.DeleteViewRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: raise NotImplementedError() @property - def list_sinks(self) -> Callable[ - [logging_config.ListSinksRequest], - Union[ - logging_config.ListSinksResponse, - Awaitable[logging_config.ListSinksResponse] - ]]: + def list_sinks( + self, + ) -> Callable[ + [logging_config.ListSinksRequest], + Union[ + logging_config.ListSinksResponse, + Awaitable[logging_config.ListSinksResponse], + ], + ]: raise NotImplementedError() @property - def get_sink(self) -> Callable[ - [logging_config.GetSinkRequest], - Union[ - logging_config.LogSink, - Awaitable[logging_config.LogSink] - ]]: + def get_sink( + self, + ) -> Callable[ + [logging_config.GetSinkRequest], + Union[logging_config.LogSink, Awaitable[logging_config.LogSink]], + ]: raise NotImplementedError() @property - def create_sink(self) -> Callable[ - [logging_config.CreateSinkRequest], - Union[ - logging_config.LogSink, - Awaitable[logging_config.LogSink] - ]]: + def create_sink( + self, + ) -> Callable[ + [logging_config.CreateSinkRequest], + Union[logging_config.LogSink, Awaitable[logging_config.LogSink]], + ]: raise NotImplementedError() @property - def update_sink(self) -> Callable[ - [logging_config.UpdateSinkRequest], - Union[ - logging_config.LogSink, - Awaitable[logging_config.LogSink] - ]]: + def 
update_sink( + self, + ) -> Callable[ + [logging_config.UpdateSinkRequest], + Union[logging_config.LogSink, Awaitable[logging_config.LogSink]], + ]: raise NotImplementedError() @property - def delete_sink(self) -> Callable[ - [logging_config.DeleteSinkRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def delete_sink( + self, + ) -> Callable[ + [logging_config.DeleteSinkRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: raise NotImplementedError() @property - def create_link(self) -> Callable[ - [logging_config.CreateLinkRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def create_link( + self, + ) -> Callable[ + [logging_config.CreateLinkRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def delete_link(self) -> Callable[ - [logging_config.DeleteLinkRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def delete_link( + self, + ) -> Callable[ + [logging_config.DeleteLinkRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def list_links(self) -> Callable[ - [logging_config.ListLinksRequest], - Union[ - logging_config.ListLinksResponse, - Awaitable[logging_config.ListLinksResponse] - ]]: + def list_links( + self, + ) -> Callable[ + [logging_config.ListLinksRequest], + Union[ + logging_config.ListLinksResponse, + Awaitable[logging_config.ListLinksResponse], + ], + ]: raise NotImplementedError() @property - def get_link(self) -> Callable[ - [logging_config.GetLinkRequest], - Union[ - logging_config.Link, - Awaitable[logging_config.Link] - ]]: + def get_link( + self, + ) -> Callable[ + [logging_config.GetLinkRequest], + Union[logging_config.Link, Awaitable[logging_config.Link]], + ]: raise NotImplementedError() @property - def list_exclusions(self) -> Callable[ - 
[logging_config.ListExclusionsRequest], - Union[ - logging_config.ListExclusionsResponse, - Awaitable[logging_config.ListExclusionsResponse] - ]]: + def list_exclusions( + self, + ) -> Callable[ + [logging_config.ListExclusionsRequest], + Union[ + logging_config.ListExclusionsResponse, + Awaitable[logging_config.ListExclusionsResponse], + ], + ]: raise NotImplementedError() @property - def get_exclusion(self) -> Callable[ - [logging_config.GetExclusionRequest], - Union[ - logging_config.LogExclusion, - Awaitable[logging_config.LogExclusion] - ]]: + def get_exclusion( + self, + ) -> Callable[ + [logging_config.GetExclusionRequest], + Union[logging_config.LogExclusion, Awaitable[logging_config.LogExclusion]], + ]: raise NotImplementedError() @property - def create_exclusion(self) -> Callable[ - [logging_config.CreateExclusionRequest], - Union[ - logging_config.LogExclusion, - Awaitable[logging_config.LogExclusion] - ]]: + def create_exclusion( + self, + ) -> Callable[ + [logging_config.CreateExclusionRequest], + Union[logging_config.LogExclusion, Awaitable[logging_config.LogExclusion]], + ]: raise NotImplementedError() @property - def update_exclusion(self) -> Callable[ - [logging_config.UpdateExclusionRequest], - Union[ - logging_config.LogExclusion, - Awaitable[logging_config.LogExclusion] - ]]: + def update_exclusion( + self, + ) -> Callable[ + [logging_config.UpdateExclusionRequest], + Union[logging_config.LogExclusion, Awaitable[logging_config.LogExclusion]], + ]: raise NotImplementedError() @property - def delete_exclusion(self) -> Callable[ - [logging_config.DeleteExclusionRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def delete_exclusion( + self, + ) -> Callable[ + [logging_config.DeleteExclusionRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: raise NotImplementedError() @property - def get_cmek_settings(self) -> Callable[ - [logging_config.GetCmekSettingsRequest], - Union[ - logging_config.CmekSettings, - 
Awaitable[logging_config.CmekSettings] - ]]: + def get_cmek_settings( + self, + ) -> Callable[ + [logging_config.GetCmekSettingsRequest], + Union[logging_config.CmekSettings, Awaitable[logging_config.CmekSettings]], + ]: raise NotImplementedError() @property - def update_cmek_settings(self) -> Callable[ - [logging_config.UpdateCmekSettingsRequest], - Union[ - logging_config.CmekSettings, - Awaitable[logging_config.CmekSettings] - ]]: + def update_cmek_settings( + self, + ) -> Callable[ + [logging_config.UpdateCmekSettingsRequest], + Union[logging_config.CmekSettings, Awaitable[logging_config.CmekSettings]], + ]: raise NotImplementedError() @property - def get_settings(self) -> Callable[ - [logging_config.GetSettingsRequest], - Union[ - logging_config.Settings, - Awaitable[logging_config.Settings] - ]]: + def get_settings( + self, + ) -> Callable[ + [logging_config.GetSettingsRequest], + Union[logging_config.Settings, Awaitable[logging_config.Settings]], + ]: raise NotImplementedError() @property - def update_settings(self) -> Callable[ - [logging_config.UpdateSettingsRequest], - Union[ - logging_config.Settings, - Awaitable[logging_config.Settings] - ]]: + def update_settings( + self, + ) -> Callable[ + [logging_config.UpdateSettingsRequest], + Union[logging_config.Settings, Awaitable[logging_config.Settings]], + ]: raise NotImplementedError() @property - def copy_log_entries(self) -> Callable[ - [logging_config.CopyLogEntriesRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def copy_log_entries( + self, + ) -> Callable[ + [logging_config.CopyLogEntriesRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property @@ -697,7 +724,10 @@ def list_operations( self, ) -> Callable[ [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + Union[ + operations_pb2.ListOperationsResponse, + 
Awaitable[operations_pb2.ListOperationsResponse], + ], ]: raise NotImplementedError() @@ -724,6 +754,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ( - 'ConfigServiceV2Transport', -) +__all__ = ("ConfigServiceV2Transport",) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 200ae0f81db5..17dbcf73cadc 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -19,25 +19,23 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson +import google.auth # type: ignore +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore import google.protobuf.message - import grpc # type: ignore import proto # type: ignore - +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore from google.cloud.logging_v2.types import logging_config -from google.longrunning import operations_pb2 # type: ignore -import google.protobuf.empty_pb2 as empty_pb2 # type: ignore -from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO +from google.longrunning import operations_pb2 # type: ignore +from 
google.protobuf.json_format import MessageToJson + +from .base import DEFAULT_CLIENT_INFO, ConfigServiceV2Transport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -47,7 +45,9 @@ class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) if logging_enabled: # pragma: NO COVER request_metadata = client_call_details.metadata if isinstance(request, proto.Message): @@ -68,7 +68,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.logging.v2.ConfigServiceV2", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -79,7 +79,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) result = response.result() if isinstance(result, proto.Message): response_payload = type(result).to_json(result) @@ -94,7 +98,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Received response for {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.logging.v2.ConfigServiceV2", "rpcName": client_call_details.method, "response": grpc_response, @@ -116,23 +120,26 @@ class 
ConfigServiceV2GrpcTransport(ConfigServiceV2Transport): It sends protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. """ + _stubs: Dict[str, Callable] - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. 
Args: @@ -260,19 +267,23 @@ def __init__(self, *, ) self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod - def create_channel(cls, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: + def create_channel( + cls, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: """Create and return a gRPC channel object. Args: host (Optional[str]): The host for the channel to use. @@ -308,13 +319,12 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property @@ -334,9 +344,11 @@ def operations_client(self) -> operations_v1.OperationsClient: return self._operations_client @property - def list_buckets(self) -> Callable[ - [logging_config.ListBucketsRequest], - logging_config.ListBucketsResponse]: + def list_buckets( + self, + ) -> Callable[ + [logging_config.ListBucketsRequest], logging_config.ListBucketsResponse + ]: r"""Return a callable for the list buckets method over gRPC. Lists log buckets. @@ -351,18 +363,18 @@ def list_buckets(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_buckets' not in self._stubs: - self._stubs['list_buckets'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListBuckets', + if "list_buckets" not in self._stubs: + self._stubs["list_buckets"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListBuckets", request_serializer=logging_config.ListBucketsRequest.serialize, response_deserializer=logging_config.ListBucketsResponse.deserialize, ) - return self._stubs['list_buckets'] + return self._stubs["list_buckets"] @property - def get_bucket(self) -> Callable[ - [logging_config.GetBucketRequest], - logging_config.LogBucket]: + def get_bucket( + self, + ) -> Callable[[logging_config.GetBucketRequest], logging_config.LogBucket]: r"""Return a callable for the get bucket method over gRPC. Gets a log bucket. @@ -377,18 +389,18 @@ def get_bucket(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_bucket' not in self._stubs: - self._stubs['get_bucket'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetBucket', + if "get_bucket" not in self._stubs: + self._stubs["get_bucket"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetBucket", request_serializer=logging_config.GetBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, ) - return self._stubs['get_bucket'] + return self._stubs["get_bucket"] @property - def create_bucket_async(self) -> Callable[ - [logging_config.CreateBucketRequest], - operations_pb2.Operation]: + def create_bucket_async( + self, + ) -> Callable[[logging_config.CreateBucketRequest], operations_pb2.Operation]: r"""Return a callable for the create bucket async method over gRPC. 
Creates a log bucket asynchronously that can be used @@ -406,18 +418,18 @@ def create_bucket_async(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_bucket_async' not in self._stubs: - self._stubs['create_bucket_async'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateBucketAsync', + if "create_bucket_async" not in self._stubs: + self._stubs["create_bucket_async"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateBucketAsync", request_serializer=logging_config.CreateBucketRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_bucket_async'] + return self._stubs["create_bucket_async"] @property - def update_bucket_async(self) -> Callable[ - [logging_config.UpdateBucketRequest], - operations_pb2.Operation]: + def update_bucket_async( + self, + ) -> Callable[[logging_config.UpdateBucketRequest], operations_pb2.Operation]: r"""Return a callable for the update bucket async method over gRPC. Updates a log bucket asynchronously. @@ -438,18 +450,18 @@ def update_bucket_async(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'update_bucket_async' not in self._stubs: - self._stubs['update_bucket_async'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateBucketAsync', + if "update_bucket_async" not in self._stubs: + self._stubs["update_bucket_async"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateBucketAsync", request_serializer=logging_config.UpdateBucketRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['update_bucket_async'] + return self._stubs["update_bucket_async"] @property - def create_bucket(self) -> Callable[ - [logging_config.CreateBucketRequest], - logging_config.LogBucket]: + def create_bucket( + self, + ) -> Callable[[logging_config.CreateBucketRequest], logging_config.LogBucket]: r"""Return a callable for the create bucket method over gRPC. Creates a log bucket that can be used to store log @@ -466,18 +478,18 @@ def create_bucket(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_bucket' not in self._stubs: - self._stubs['create_bucket'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateBucket', + if "create_bucket" not in self._stubs: + self._stubs["create_bucket"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateBucket", request_serializer=logging_config.CreateBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, ) - return self._stubs['create_bucket'] + return self._stubs["create_bucket"] @property - def update_bucket(self) -> Callable[ - [logging_config.UpdateBucketRequest], - logging_config.LogBucket]: + def update_bucket( + self, + ) -> Callable[[logging_config.UpdateBucketRequest], logging_config.LogBucket]: r"""Return a callable for the update bucket method over gRPC. Updates a log bucket. @@ -498,18 +510,18 @@ def update_bucket(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_bucket' not in self._stubs: - self._stubs['update_bucket'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateBucket', + if "update_bucket" not in self._stubs: + self._stubs["update_bucket"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateBucket", request_serializer=logging_config.UpdateBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, ) - return self._stubs['update_bucket'] + return self._stubs["update_bucket"] @property - def delete_bucket(self) -> Callable[ - [logging_config.DeleteBucketRequest], - empty_pb2.Empty]: + def delete_bucket( + self, + ) -> Callable[[logging_config.DeleteBucketRequest], empty_pb2.Empty]: r"""Return a callable for the delete bucket method over gRPC. Deletes a log bucket. @@ -529,18 +541,18 @@ def delete_bucket(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_bucket' not in self._stubs: - self._stubs['delete_bucket'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteBucket', + if "delete_bucket" not in self._stubs: + self._stubs["delete_bucket"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteBucket", request_serializer=logging_config.DeleteBucketRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_bucket'] + return self._stubs["delete_bucket"] @property - def undelete_bucket(self) -> Callable[ - [logging_config.UndeleteBucketRequest], - empty_pb2.Empty]: + def undelete_bucket( + self, + ) -> Callable[[logging_config.UndeleteBucketRequest], empty_pb2.Empty]: r"""Return a callable for the undelete bucket method over gRPC. Undeletes a log bucket. 
A bucket that has been @@ -557,18 +569,18 @@ def undelete_bucket(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'undelete_bucket' not in self._stubs: - self._stubs['undelete_bucket'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UndeleteBucket', + if "undelete_bucket" not in self._stubs: + self._stubs["undelete_bucket"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UndeleteBucket", request_serializer=logging_config.UndeleteBucketRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['undelete_bucket'] + return self._stubs["undelete_bucket"] @property - def list_views(self) -> Callable[ - [logging_config.ListViewsRequest], - logging_config.ListViewsResponse]: + def list_views( + self, + ) -> Callable[[logging_config.ListViewsRequest], logging_config.ListViewsResponse]: r"""Return a callable for the list views method over gRPC. Lists views on a log bucket. @@ -583,18 +595,18 @@ def list_views(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_views' not in self._stubs: - self._stubs['list_views'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListViews', + if "list_views" not in self._stubs: + self._stubs["list_views"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListViews", request_serializer=logging_config.ListViewsRequest.serialize, response_deserializer=logging_config.ListViewsResponse.deserialize, ) - return self._stubs['list_views'] + return self._stubs["list_views"] @property - def get_view(self) -> Callable[ - [logging_config.GetViewRequest], - logging_config.LogView]: + def get_view( + self, + ) -> Callable[[logging_config.GetViewRequest], logging_config.LogView]: r"""Return a callable for the get view method over gRPC. 
Gets a view on a log bucket.. @@ -609,18 +621,18 @@ def get_view(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_view' not in self._stubs: - self._stubs['get_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetView', + if "get_view" not in self._stubs: + self._stubs["get_view"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetView", request_serializer=logging_config.GetViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, ) - return self._stubs['get_view'] + return self._stubs["get_view"] @property - def create_view(self) -> Callable[ - [logging_config.CreateViewRequest], - logging_config.LogView]: + def create_view( + self, + ) -> Callable[[logging_config.CreateViewRequest], logging_config.LogView]: r"""Return a callable for the create view method over gRPC. Creates a view over log entries in a log bucket. A @@ -636,18 +648,18 @@ def create_view(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_view' not in self._stubs: - self._stubs['create_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateView', + if "create_view" not in self._stubs: + self._stubs["create_view"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateView", request_serializer=logging_config.CreateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, ) - return self._stubs['create_view'] + return self._stubs["create_view"] @property - def update_view(self) -> Callable[ - [logging_config.UpdateViewRequest], - logging_config.LogView]: + def update_view( + self, + ) -> Callable[[logging_config.UpdateViewRequest], logging_config.LogView]: r"""Return a callable for the update view method over gRPC. Updates a view on a log bucket. 
This method replaces the @@ -666,18 +678,18 @@ def update_view(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_view' not in self._stubs: - self._stubs['update_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateView', + if "update_view" not in self._stubs: + self._stubs["update_view"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateView", request_serializer=logging_config.UpdateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, ) - return self._stubs['update_view'] + return self._stubs["update_view"] @property - def delete_view(self) -> Callable[ - [logging_config.DeleteViewRequest], - empty_pb2.Empty]: + def delete_view( + self, + ) -> Callable[[logging_config.DeleteViewRequest], empty_pb2.Empty]: r"""Return a callable for the delete view method over gRPC. Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is @@ -695,18 +707,18 @@ def delete_view(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_view' not in self._stubs: - self._stubs['delete_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteView', + if "delete_view" not in self._stubs: + self._stubs["delete_view"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteView", request_serializer=logging_config.DeleteViewRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_view'] + return self._stubs["delete_view"] @property - def list_sinks(self) -> Callable[ - [logging_config.ListSinksRequest], - logging_config.ListSinksResponse]: + def list_sinks( + self, + ) -> Callable[[logging_config.ListSinksRequest], logging_config.ListSinksResponse]: r"""Return a callable for the list sinks method over gRPC. 
Lists sinks. @@ -721,18 +733,18 @@ def list_sinks(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_sinks' not in self._stubs: - self._stubs['list_sinks'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListSinks', + if "list_sinks" not in self._stubs: + self._stubs["list_sinks"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListSinks", request_serializer=logging_config.ListSinksRequest.serialize, response_deserializer=logging_config.ListSinksResponse.deserialize, ) - return self._stubs['list_sinks'] + return self._stubs["list_sinks"] @property - def get_sink(self) -> Callable[ - [logging_config.GetSinkRequest], - logging_config.LogSink]: + def get_sink( + self, + ) -> Callable[[logging_config.GetSinkRequest], logging_config.LogSink]: r"""Return a callable for the get sink method over gRPC. Gets a sink. @@ -747,18 +759,18 @@ def get_sink(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_sink' not in self._stubs: - self._stubs['get_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetSink', + if "get_sink" not in self._stubs: + self._stubs["get_sink"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetSink", request_serializer=logging_config.GetSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, ) - return self._stubs['get_sink'] + return self._stubs["get_sink"] @property - def create_sink(self) -> Callable[ - [logging_config.CreateSinkRequest], - logging_config.LogSink]: + def create_sink( + self, + ) -> Callable[[logging_config.CreateSinkRequest], logging_config.LogSink]: r"""Return a callable for the create sink method over gRPC. 
Creates a sink that exports specified log entries to a @@ -777,18 +789,18 @@ def create_sink(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_sink' not in self._stubs: - self._stubs['create_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateSink', + if "create_sink" not in self._stubs: + self._stubs["create_sink"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateSink", request_serializer=logging_config.CreateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, ) - return self._stubs['create_sink'] + return self._stubs["create_sink"] @property - def update_sink(self) -> Callable[ - [logging_config.UpdateSinkRequest], - logging_config.LogSink]: + def update_sink( + self, + ) -> Callable[[logging_config.UpdateSinkRequest], logging_config.LogSink]: r"""Return a callable for the update sink method over gRPC. Updates a sink. This method replaces the following fields in the @@ -808,18 +820,18 @@ def update_sink(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'update_sink' not in self._stubs: - self._stubs['update_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateSink', + if "update_sink" not in self._stubs: + self._stubs["update_sink"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateSink", request_serializer=logging_config.UpdateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, ) - return self._stubs['update_sink'] + return self._stubs["update_sink"] @property - def delete_sink(self) -> Callable[ - [logging_config.DeleteSinkRequest], - empty_pb2.Empty]: + def delete_sink( + self, + ) -> Callable[[logging_config.DeleteSinkRequest], empty_pb2.Empty]: r"""Return a callable for the delete sink method over gRPC. Deletes a sink. If the sink has a unique ``writer_identity``, @@ -835,18 +847,18 @@ def delete_sink(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_sink' not in self._stubs: - self._stubs['delete_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteSink', + if "delete_sink" not in self._stubs: + self._stubs["delete_sink"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteSink", request_serializer=logging_config.DeleteSinkRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_sink'] + return self._stubs["delete_sink"] @property - def create_link(self) -> Callable[ - [logging_config.CreateLinkRequest], - operations_pb2.Operation]: + def create_link( + self, + ) -> Callable[[logging_config.CreateLinkRequest], operations_pb2.Operation]: r"""Return a callable for the create link method over gRPC. Asynchronously creates a linked dataset in BigQuery @@ -864,18 +876,18 @@ def create_link(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_link' not in self._stubs: - self._stubs['create_link'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateLink', + if "create_link" not in self._stubs: + self._stubs["create_link"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateLink", request_serializer=logging_config.CreateLinkRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_link'] + return self._stubs["create_link"] @property - def delete_link(self) -> Callable[ - [logging_config.DeleteLinkRequest], - operations_pb2.Operation]: + def delete_link( + self, + ) -> Callable[[logging_config.DeleteLinkRequest], operations_pb2.Operation]: r"""Return a callable for the delete link method over gRPC. Deletes a link. This will also delete the @@ -891,18 +903,18 @@ def delete_link(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_link' not in self._stubs: - self._stubs['delete_link'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteLink', + if "delete_link" not in self._stubs: + self._stubs["delete_link"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteLink", request_serializer=logging_config.DeleteLinkRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['delete_link'] + return self._stubs["delete_link"] @property - def list_links(self) -> Callable[ - [logging_config.ListLinksRequest], - logging_config.ListLinksResponse]: + def list_links( + self, + ) -> Callable[[logging_config.ListLinksRequest], logging_config.ListLinksResponse]: r"""Return a callable for the list links method over gRPC. Lists links. @@ -917,18 +929,18 @@ def list_links(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_links' not in self._stubs: - self._stubs['list_links'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListLinks', + if "list_links" not in self._stubs: + self._stubs["list_links"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListLinks", request_serializer=logging_config.ListLinksRequest.serialize, response_deserializer=logging_config.ListLinksResponse.deserialize, ) - return self._stubs['list_links'] + return self._stubs["list_links"] @property - def get_link(self) -> Callable[ - [logging_config.GetLinkRequest], - logging_config.Link]: + def get_link( + self, + ) -> Callable[[logging_config.GetLinkRequest], logging_config.Link]: r"""Return a callable for the get link method over gRPC. Gets a link. @@ -943,18 +955,20 @@ def get_link(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_link' not in self._stubs: - self._stubs['get_link'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetLink', + if "get_link" not in self._stubs: + self._stubs["get_link"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetLink", request_serializer=logging_config.GetLinkRequest.serialize, response_deserializer=logging_config.Link.deserialize, ) - return self._stubs['get_link'] + return self._stubs["get_link"] @property - def list_exclusions(self) -> Callable[ - [logging_config.ListExclusionsRequest], - logging_config.ListExclusionsResponse]: + def list_exclusions( + self, + ) -> Callable[ + [logging_config.ListExclusionsRequest], logging_config.ListExclusionsResponse + ]: r"""Return a callable for the list exclusions method over gRPC. Lists all the exclusions on the \_Default sink in a parent @@ -970,18 +984,18 @@ def list_exclusions(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_exclusions' not in self._stubs: - self._stubs['list_exclusions'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListExclusions', + if "list_exclusions" not in self._stubs: + self._stubs["list_exclusions"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListExclusions", request_serializer=logging_config.ListExclusionsRequest.serialize, response_deserializer=logging_config.ListExclusionsResponse.deserialize, ) - return self._stubs['list_exclusions'] + return self._stubs["list_exclusions"] @property - def get_exclusion(self) -> Callable[ - [logging_config.GetExclusionRequest], - logging_config.LogExclusion]: + def get_exclusion( + self, + ) -> Callable[[logging_config.GetExclusionRequest], logging_config.LogExclusion]: r"""Return a callable for the get exclusion method over gRPC. Gets the description of an exclusion in the \_Default sink. @@ -996,18 +1010,18 @@ def get_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'get_exclusion' not in self._stubs: - self._stubs['get_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetExclusion', + if "get_exclusion" not in self._stubs: + self._stubs["get_exclusion"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetExclusion", request_serializer=logging_config.GetExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, ) - return self._stubs['get_exclusion'] + return self._stubs["get_exclusion"] @property - def create_exclusion(self) -> Callable[ - [logging_config.CreateExclusionRequest], - logging_config.LogExclusion]: + def create_exclusion( + self, + ) -> Callable[[logging_config.CreateExclusionRequest], logging_config.LogExclusion]: r"""Return a callable for the create exclusion method over gRPC. Creates a new exclusion in the \_Default sink in a specified @@ -1024,18 +1038,18 @@ def create_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_exclusion' not in self._stubs: - self._stubs['create_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateExclusion', + if "create_exclusion" not in self._stubs: + self._stubs["create_exclusion"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateExclusion", request_serializer=logging_config.CreateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, ) - return self._stubs['create_exclusion'] + return self._stubs["create_exclusion"] @property - def update_exclusion(self) -> Callable[ - [logging_config.UpdateExclusionRequest], - logging_config.LogExclusion]: + def update_exclusion( + self, + ) -> Callable[[logging_config.UpdateExclusionRequest], logging_config.LogExclusion]: r"""Return a callable for the update exclusion method over gRPC. 
Changes one or more properties of an existing exclusion in the @@ -1051,18 +1065,18 @@ def update_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_exclusion' not in self._stubs: - self._stubs['update_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateExclusion', + if "update_exclusion" not in self._stubs: + self._stubs["update_exclusion"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateExclusion", request_serializer=logging_config.UpdateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, ) - return self._stubs['update_exclusion'] + return self._stubs["update_exclusion"] @property - def delete_exclusion(self) -> Callable[ - [logging_config.DeleteExclusionRequest], - empty_pb2.Empty]: + def delete_exclusion( + self, + ) -> Callable[[logging_config.DeleteExclusionRequest], empty_pb2.Empty]: r"""Return a callable for the delete exclusion method over gRPC. Deletes an exclusion in the \_Default sink. @@ -1077,18 +1091,18 @@ def delete_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'delete_exclusion' not in self._stubs: - self._stubs['delete_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteExclusion', + if "delete_exclusion" not in self._stubs: + self._stubs["delete_exclusion"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteExclusion", request_serializer=logging_config.DeleteExclusionRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_exclusion'] + return self._stubs["delete_exclusion"] @property - def get_cmek_settings(self) -> Callable[ - [logging_config.GetCmekSettingsRequest], - logging_config.CmekSettings]: + def get_cmek_settings( + self, + ) -> Callable[[logging_config.GetCmekSettingsRequest], logging_config.CmekSettings]: r"""Return a callable for the get cmek settings method over gRPC. Gets the Logging CMEK settings for the given resource. @@ -1112,18 +1126,20 @@ def get_cmek_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_cmek_settings' not in self._stubs: - self._stubs['get_cmek_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetCmekSettings', + if "get_cmek_settings" not in self._stubs: + self._stubs["get_cmek_settings"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetCmekSettings", request_serializer=logging_config.GetCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, ) - return self._stubs['get_cmek_settings'] + return self._stubs["get_cmek_settings"] @property - def update_cmek_settings(self) -> Callable[ - [logging_config.UpdateCmekSettingsRequest], - logging_config.CmekSettings]: + def update_cmek_settings( + self, + ) -> Callable[ + [logging_config.UpdateCmekSettingsRequest], logging_config.CmekSettings + ]: r"""Return a callable for the update cmek settings method over gRPC. 
Updates the Log Router CMEK settings for the given resource. @@ -1152,18 +1168,18 @@ def update_cmek_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_cmek_settings' not in self._stubs: - self._stubs['update_cmek_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateCmekSettings', + if "update_cmek_settings" not in self._stubs: + self._stubs["update_cmek_settings"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateCmekSettings", request_serializer=logging_config.UpdateCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, ) - return self._stubs['update_cmek_settings'] + return self._stubs["update_cmek_settings"] @property - def get_settings(self) -> Callable[ - [logging_config.GetSettingsRequest], - logging_config.Settings]: + def get_settings( + self, + ) -> Callable[[logging_config.GetSettingsRequest], logging_config.Settings]: r"""Return a callable for the get settings method over gRPC. Gets the Log Router settings for the given resource. @@ -1188,18 +1204,18 @@ def get_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'get_settings' not in self._stubs: - self._stubs['get_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetSettings', + if "get_settings" not in self._stubs: + self._stubs["get_settings"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetSettings", request_serializer=logging_config.GetSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, ) - return self._stubs['get_settings'] + return self._stubs["get_settings"] @property - def update_settings(self) -> Callable[ - [logging_config.UpdateSettingsRequest], - logging_config.Settings]: + def update_settings( + self, + ) -> Callable[[logging_config.UpdateSettingsRequest], logging_config.Settings]: r"""Return a callable for the update settings method over gRPC. Updates the Log Router settings for the given resource. @@ -1231,18 +1247,18 @@ def update_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_settings' not in self._stubs: - self._stubs['update_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateSettings', + if "update_settings" not in self._stubs: + self._stubs["update_settings"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateSettings", request_serializer=logging_config.UpdateSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, ) - return self._stubs['update_settings'] + return self._stubs["update_settings"] @property - def copy_log_entries(self) -> Callable[ - [logging_config.CopyLogEntriesRequest], - operations_pb2.Operation]: + def copy_log_entries( + self, + ) -> Callable[[logging_config.CopyLogEntriesRequest], operations_pb2.Operation]: r"""Return a callable for the copy log entries method over gRPC. 
Copies a set of log entries from a log bucket to a @@ -1258,13 +1274,13 @@ def copy_log_entries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'copy_log_entries' not in self._stubs: - self._stubs['copy_log_entries'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CopyLogEntries', + if "copy_log_entries" not in self._stubs: + self._stubs["copy_log_entries"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CopyLogEntries", request_serializer=logging_config.CopyLogEntriesRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['copy_log_entries'] + return self._stubs["copy_log_entries"] def close(self): self._logged_channel.close() @@ -1273,8 +1289,7 @@ def close(self): def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -1291,8 +1306,7 @@ def cancel_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -1308,9 +1322,10 @@ def get_operation( @property def list_operations( self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. 
- """ + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -1328,6 +1343,4 @@ def kind(self) -> str: return "grpc" -__all__ = ( - 'ConfigServiceV2GrpcTransport', -) +__all__ = ("ConfigServiceV2GrpcTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index f230fa2a74a9..7c8d93916b96 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -15,33 +15,31 @@ # import inspect import json -import pickle import logging as std_logging +import pickle import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.api_core import retry_async as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from 
google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore from grpc.experimental import aio # type: ignore -from google.cloud.logging_v2.types import logging_config -from google.longrunning import operations_pb2 # type: ignore -import google.protobuf.empty_pb2 as empty_pb2 # type: ignore -from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO +from .base import DEFAULT_CLIENT_INFO, ConfigServiceV2Transport from .grpc import ConfigServiceV2GrpcTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -49,9 +47,13 @@ _LOGGER = std_logging.getLogger(__name__) -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) if logging_enabled: # pragma: NO COVER request_metadata = client_call_details.metadata if isinstance(request, proto.Message): @@ -72,7 +74,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.logging.v2.ConfigServiceV2", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -83,7 +85,11 @@ async def intercept_unary_unary(self, continuation, client_call_details, request if logging_enabled: # pragma: NO COVER response_metadata = await 
response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) result = await response if isinstance(result, proto.Message): response_payload = type(result).to_json(result) @@ -98,7 +104,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Received response to rpc {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.logging.v2.ConfigServiceV2", "rpcName": str(client_call_details.method), "response": grpc_response, @@ -125,13 +131,15 @@ class ConfigServiceV2GrpcAsyncIOTransport(ConfigServiceV2Transport): _stubs: Dict[str, Callable] = {} @classmethod - def create_channel(cls, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: + def create_channel( + cls, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: host (Optional[str]): The host for the channel to use. 
@@ -162,24 +170,26 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. 
Args: @@ -309,7 +319,9 @@ def __init__(self, *, self._interceptor = _LoggingClientAIOInterceptor() self._grpc_channel._unary_unary_interceptors.append(self._interceptor) self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @@ -340,9 +352,12 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: return self._operations_client @property - def list_buckets(self) -> Callable[ - [logging_config.ListBucketsRequest], - Awaitable[logging_config.ListBucketsResponse]]: + def list_buckets( + self, + ) -> Callable[ + [logging_config.ListBucketsRequest], + Awaitable[logging_config.ListBucketsResponse], + ]: r"""Return a callable for the list buckets method over gRPC. Lists log buckets. @@ -357,18 +372,20 @@ def list_buckets(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_buckets' not in self._stubs: - self._stubs['list_buckets'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListBuckets', + if "list_buckets" not in self._stubs: + self._stubs["list_buckets"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListBuckets", request_serializer=logging_config.ListBucketsRequest.serialize, response_deserializer=logging_config.ListBucketsResponse.deserialize, ) - return self._stubs['list_buckets'] + return self._stubs["list_buckets"] @property - def get_bucket(self) -> Callable[ - [logging_config.GetBucketRequest], - Awaitable[logging_config.LogBucket]]: + def get_bucket( + self, + ) -> Callable[ + [logging_config.GetBucketRequest], Awaitable[logging_config.LogBucket] + ]: r"""Return a callable for the get bucket method over gRPC. Gets a log bucket. @@ -383,18 +400,20 @@ def get_bucket(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_bucket' not in self._stubs: - self._stubs['get_bucket'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetBucket', + if "get_bucket" not in self._stubs: + self._stubs["get_bucket"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetBucket", request_serializer=logging_config.GetBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, ) - return self._stubs['get_bucket'] + return self._stubs["get_bucket"] @property - def create_bucket_async(self) -> Callable[ - [logging_config.CreateBucketRequest], - Awaitable[operations_pb2.Operation]]: + def create_bucket_async( + self, + ) -> Callable[ + [logging_config.CreateBucketRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the create bucket async method over gRPC. Creates a log bucket asynchronously that can be used @@ -412,18 +431,20 @@ def create_bucket_async(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_bucket_async' not in self._stubs: - self._stubs['create_bucket_async'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateBucketAsync', + if "create_bucket_async" not in self._stubs: + self._stubs["create_bucket_async"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateBucketAsync", request_serializer=logging_config.CreateBucketRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_bucket_async'] + return self._stubs["create_bucket_async"] @property - def update_bucket_async(self) -> Callable[ - [logging_config.UpdateBucketRequest], - Awaitable[operations_pb2.Operation]]: + def update_bucket_async( + self, + ) -> Callable[ + [logging_config.UpdateBucketRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the update bucket async method over gRPC. Updates a log bucket asynchronously. @@ -444,18 +465,20 @@ def update_bucket_async(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'update_bucket_async' not in self._stubs: - self._stubs['update_bucket_async'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateBucketAsync', + if "update_bucket_async" not in self._stubs: + self._stubs["update_bucket_async"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateBucketAsync", request_serializer=logging_config.UpdateBucketRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['update_bucket_async'] + return self._stubs["update_bucket_async"] @property - def create_bucket(self) -> Callable[ - [logging_config.CreateBucketRequest], - Awaitable[logging_config.LogBucket]]: + def create_bucket( + self, + ) -> Callable[ + [logging_config.CreateBucketRequest], Awaitable[logging_config.LogBucket] + ]: r"""Return a callable for the create bucket method over gRPC. Creates a log bucket that can be used to store log @@ -472,18 +495,20 @@ def create_bucket(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_bucket' not in self._stubs: - self._stubs['create_bucket'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateBucket', + if "create_bucket" not in self._stubs: + self._stubs["create_bucket"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateBucket", request_serializer=logging_config.CreateBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, ) - return self._stubs['create_bucket'] + return self._stubs["create_bucket"] @property - def update_bucket(self) -> Callable[ - [logging_config.UpdateBucketRequest], - Awaitable[logging_config.LogBucket]]: + def update_bucket( + self, + ) -> Callable[ + [logging_config.UpdateBucketRequest], Awaitable[logging_config.LogBucket] + ]: r"""Return a callable for the update bucket method over gRPC. Updates a log bucket. 
@@ -504,18 +529,18 @@ def update_bucket(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_bucket' not in self._stubs: - self._stubs['update_bucket'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateBucket', + if "update_bucket" not in self._stubs: + self._stubs["update_bucket"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateBucket", request_serializer=logging_config.UpdateBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, ) - return self._stubs['update_bucket'] + return self._stubs["update_bucket"] @property - def delete_bucket(self) -> Callable[ - [logging_config.DeleteBucketRequest], - Awaitable[empty_pb2.Empty]]: + def delete_bucket( + self, + ) -> Callable[[logging_config.DeleteBucketRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete bucket method over gRPC. Deletes a log bucket. @@ -535,18 +560,18 @@ def delete_bucket(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'delete_bucket' not in self._stubs: - self._stubs['delete_bucket'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteBucket', + if "delete_bucket" not in self._stubs: + self._stubs["delete_bucket"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteBucket", request_serializer=logging_config.DeleteBucketRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_bucket'] + return self._stubs["delete_bucket"] @property - def undelete_bucket(self) -> Callable[ - [logging_config.UndeleteBucketRequest], - Awaitable[empty_pb2.Empty]]: + def undelete_bucket( + self, + ) -> Callable[[logging_config.UndeleteBucketRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the undelete bucket method over gRPC. Undeletes a log bucket. A bucket that has been @@ -563,18 +588,20 @@ def undelete_bucket(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'undelete_bucket' not in self._stubs: - self._stubs['undelete_bucket'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UndeleteBucket', + if "undelete_bucket" not in self._stubs: + self._stubs["undelete_bucket"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UndeleteBucket", request_serializer=logging_config.UndeleteBucketRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['undelete_bucket'] + return self._stubs["undelete_bucket"] @property - def list_views(self) -> Callable[ - [logging_config.ListViewsRequest], - Awaitable[logging_config.ListViewsResponse]]: + def list_views( + self, + ) -> Callable[ + [logging_config.ListViewsRequest], Awaitable[logging_config.ListViewsResponse] + ]: r"""Return a callable for the list views method over gRPC. Lists views on a log bucket. @@ -589,18 +616,18 @@ def list_views(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_views' not in self._stubs: - self._stubs['list_views'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListViews', + if "list_views" not in self._stubs: + self._stubs["list_views"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListViews", request_serializer=logging_config.ListViewsRequest.serialize, response_deserializer=logging_config.ListViewsResponse.deserialize, ) - return self._stubs['list_views'] + return self._stubs["list_views"] @property - def get_view(self) -> Callable[ - [logging_config.GetViewRequest], - Awaitable[logging_config.LogView]]: + def get_view( + self, + ) -> Callable[[logging_config.GetViewRequest], Awaitable[logging_config.LogView]]: r"""Return a callable for the get view method over gRPC. Gets a view on a log bucket.. @@ -615,18 +642,20 @@ def get_view(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_view' not in self._stubs: - self._stubs['get_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetView', + if "get_view" not in self._stubs: + self._stubs["get_view"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetView", request_serializer=logging_config.GetViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, ) - return self._stubs['get_view'] + return self._stubs["get_view"] @property - def create_view(self) -> Callable[ - [logging_config.CreateViewRequest], - Awaitable[logging_config.LogView]]: + def create_view( + self, + ) -> Callable[ + [logging_config.CreateViewRequest], Awaitable[logging_config.LogView] + ]: r"""Return a callable for the create view method over gRPC. Creates a view over log entries in a log bucket. A @@ -642,18 +671,20 @@ def create_view(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_view' not in self._stubs: - self._stubs['create_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateView', + if "create_view" not in self._stubs: + self._stubs["create_view"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateView", request_serializer=logging_config.CreateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, ) - return self._stubs['create_view'] + return self._stubs["create_view"] @property - def update_view(self) -> Callable[ - [logging_config.UpdateViewRequest], - Awaitable[logging_config.LogView]]: + def update_view( + self, + ) -> Callable[ + [logging_config.UpdateViewRequest], Awaitable[logging_config.LogView] + ]: r"""Return a callable for the update view method over gRPC. Updates a view on a log bucket. This method replaces the @@ -672,18 +703,18 @@ def update_view(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_view' not in self._stubs: - self._stubs['update_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateView', + if "update_view" not in self._stubs: + self._stubs["update_view"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateView", request_serializer=logging_config.UpdateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, ) - return self._stubs['update_view'] + return self._stubs["update_view"] @property - def delete_view(self) -> Callable[ - [logging_config.DeleteViewRequest], - Awaitable[empty_pb2.Empty]]: + def delete_view( + self, + ) -> Callable[[logging_config.DeleteViewRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete view method over gRPC. Deletes a view on a log bucket. 
If an ``UNAVAILABLE`` error is @@ -701,18 +732,20 @@ def delete_view(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_view' not in self._stubs: - self._stubs['delete_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteView', + if "delete_view" not in self._stubs: + self._stubs["delete_view"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteView", request_serializer=logging_config.DeleteViewRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_view'] + return self._stubs["delete_view"] @property - def list_sinks(self) -> Callable[ - [logging_config.ListSinksRequest], - Awaitable[logging_config.ListSinksResponse]]: + def list_sinks( + self, + ) -> Callable[ + [logging_config.ListSinksRequest], Awaitable[logging_config.ListSinksResponse] + ]: r"""Return a callable for the list sinks method over gRPC. Lists sinks. @@ -727,18 +760,18 @@ def list_sinks(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_sinks' not in self._stubs: - self._stubs['list_sinks'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListSinks', + if "list_sinks" not in self._stubs: + self._stubs["list_sinks"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListSinks", request_serializer=logging_config.ListSinksRequest.serialize, response_deserializer=logging_config.ListSinksResponse.deserialize, ) - return self._stubs['list_sinks'] + return self._stubs["list_sinks"] @property - def get_sink(self) -> Callable[ - [logging_config.GetSinkRequest], - Awaitable[logging_config.LogSink]]: + def get_sink( + self, + ) -> Callable[[logging_config.GetSinkRequest], Awaitable[logging_config.LogSink]]: r"""Return a callable for the get sink method over gRPC. 
Gets a sink. @@ -753,18 +786,20 @@ def get_sink(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_sink' not in self._stubs: - self._stubs['get_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetSink', + if "get_sink" not in self._stubs: + self._stubs["get_sink"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetSink", request_serializer=logging_config.GetSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, ) - return self._stubs['get_sink'] + return self._stubs["get_sink"] @property - def create_sink(self) -> Callable[ - [logging_config.CreateSinkRequest], - Awaitable[logging_config.LogSink]]: + def create_sink( + self, + ) -> Callable[ + [logging_config.CreateSinkRequest], Awaitable[logging_config.LogSink] + ]: r"""Return a callable for the create sink method over gRPC. Creates a sink that exports specified log entries to a @@ -783,18 +818,20 @@ def create_sink(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'create_sink' not in self._stubs: - self._stubs['create_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateSink', + if "create_sink" not in self._stubs: + self._stubs["create_sink"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateSink", request_serializer=logging_config.CreateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, ) - return self._stubs['create_sink'] + return self._stubs["create_sink"] @property - def update_sink(self) -> Callable[ - [logging_config.UpdateSinkRequest], - Awaitable[logging_config.LogSink]]: + def update_sink( + self, + ) -> Callable[ + [logging_config.UpdateSinkRequest], Awaitable[logging_config.LogSink] + ]: r"""Return a callable for the update sink method over gRPC. Updates a sink. This method replaces the following fields in the @@ -814,18 +851,18 @@ def update_sink(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_sink' not in self._stubs: - self._stubs['update_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateSink', + if "update_sink" not in self._stubs: + self._stubs["update_sink"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateSink", request_serializer=logging_config.UpdateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, ) - return self._stubs['update_sink'] + return self._stubs["update_sink"] @property - def delete_sink(self) -> Callable[ - [logging_config.DeleteSinkRequest], - Awaitable[empty_pb2.Empty]]: + def delete_sink( + self, + ) -> Callable[[logging_config.DeleteSinkRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete sink method over gRPC. Deletes a sink. If the sink has a unique ``writer_identity``, @@ -841,18 +878,20 @@ def delete_sink(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_sink' not in self._stubs: - self._stubs['delete_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteSink', + if "delete_sink" not in self._stubs: + self._stubs["delete_sink"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteSink", request_serializer=logging_config.DeleteSinkRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_sink'] + return self._stubs["delete_sink"] @property - def create_link(self) -> Callable[ - [logging_config.CreateLinkRequest], - Awaitable[operations_pb2.Operation]]: + def create_link( + self, + ) -> Callable[ + [logging_config.CreateLinkRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the create link method over gRPC. Asynchronously creates a linked dataset in BigQuery @@ -870,18 +909,20 @@ def create_link(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_link' not in self._stubs: - self._stubs['create_link'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateLink', + if "create_link" not in self._stubs: + self._stubs["create_link"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateLink", request_serializer=logging_config.CreateLinkRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_link'] + return self._stubs["create_link"] @property - def delete_link(self) -> Callable[ - [logging_config.DeleteLinkRequest], - Awaitable[operations_pb2.Operation]]: + def delete_link( + self, + ) -> Callable[ + [logging_config.DeleteLinkRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the delete link method over gRPC. Deletes a link. 
This will also delete the @@ -897,18 +938,20 @@ def delete_link(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_link' not in self._stubs: - self._stubs['delete_link'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteLink', + if "delete_link" not in self._stubs: + self._stubs["delete_link"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteLink", request_serializer=logging_config.DeleteLinkRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['delete_link'] + return self._stubs["delete_link"] @property - def list_links(self) -> Callable[ - [logging_config.ListLinksRequest], - Awaitable[logging_config.ListLinksResponse]]: + def list_links( + self, + ) -> Callable[ + [logging_config.ListLinksRequest], Awaitable[logging_config.ListLinksResponse] + ]: r"""Return a callable for the list links method over gRPC. Lists links. @@ -923,18 +966,18 @@ def list_links(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_links' not in self._stubs: - self._stubs['list_links'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListLinks', + if "list_links" not in self._stubs: + self._stubs["list_links"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListLinks", request_serializer=logging_config.ListLinksRequest.serialize, response_deserializer=logging_config.ListLinksResponse.deserialize, ) - return self._stubs['list_links'] + return self._stubs["list_links"] @property - def get_link(self) -> Callable[ - [logging_config.GetLinkRequest], - Awaitable[logging_config.Link]]: + def get_link( + self, + ) -> Callable[[logging_config.GetLinkRequest], Awaitable[logging_config.Link]]: r"""Return a callable for the get link method over gRPC. 
Gets a link. @@ -949,18 +992,21 @@ def get_link(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_link' not in self._stubs: - self._stubs['get_link'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetLink', + if "get_link" not in self._stubs: + self._stubs["get_link"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetLink", request_serializer=logging_config.GetLinkRequest.serialize, response_deserializer=logging_config.Link.deserialize, ) - return self._stubs['get_link'] + return self._stubs["get_link"] @property - def list_exclusions(self) -> Callable[ - [logging_config.ListExclusionsRequest], - Awaitable[logging_config.ListExclusionsResponse]]: + def list_exclusions( + self, + ) -> Callable[ + [logging_config.ListExclusionsRequest], + Awaitable[logging_config.ListExclusionsResponse], + ]: r"""Return a callable for the list exclusions method over gRPC. Lists all the exclusions on the \_Default sink in a parent @@ -976,18 +1022,20 @@ def list_exclusions(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_exclusions' not in self._stubs: - self._stubs['list_exclusions'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListExclusions', + if "list_exclusions" not in self._stubs: + self._stubs["list_exclusions"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListExclusions", request_serializer=logging_config.ListExclusionsRequest.serialize, response_deserializer=logging_config.ListExclusionsResponse.deserialize, ) - return self._stubs['list_exclusions'] + return self._stubs["list_exclusions"] @property - def get_exclusion(self) -> Callable[ - [logging_config.GetExclusionRequest], - Awaitable[logging_config.LogExclusion]]: + def get_exclusion( + self, + ) -> Callable[ + [logging_config.GetExclusionRequest], Awaitable[logging_config.LogExclusion] + ]: r"""Return a callable for the get exclusion method over gRPC. Gets the description of an exclusion in the \_Default sink. @@ -1002,18 +1050,20 @@ def get_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_exclusion' not in self._stubs: - self._stubs['get_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetExclusion', + if "get_exclusion" not in self._stubs: + self._stubs["get_exclusion"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetExclusion", request_serializer=logging_config.GetExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, ) - return self._stubs['get_exclusion'] + return self._stubs["get_exclusion"] @property - def create_exclusion(self) -> Callable[ - [logging_config.CreateExclusionRequest], - Awaitable[logging_config.LogExclusion]]: + def create_exclusion( + self, + ) -> Callable[ + [logging_config.CreateExclusionRequest], Awaitable[logging_config.LogExclusion] + ]: r"""Return a callable for the create exclusion method over gRPC. 
Creates a new exclusion in the \_Default sink in a specified @@ -1030,18 +1080,20 @@ def create_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_exclusion' not in self._stubs: - self._stubs['create_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateExclusion', + if "create_exclusion" not in self._stubs: + self._stubs["create_exclusion"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateExclusion", request_serializer=logging_config.CreateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, ) - return self._stubs['create_exclusion'] + return self._stubs["create_exclusion"] @property - def update_exclusion(self) -> Callable[ - [logging_config.UpdateExclusionRequest], - Awaitable[logging_config.LogExclusion]]: + def update_exclusion( + self, + ) -> Callable[ + [logging_config.UpdateExclusionRequest], Awaitable[logging_config.LogExclusion] + ]: r"""Return a callable for the update exclusion method over gRPC. Changes one or more properties of an existing exclusion in the @@ -1057,18 +1109,18 @@ def update_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'update_exclusion' not in self._stubs: - self._stubs['update_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateExclusion', + if "update_exclusion" not in self._stubs: + self._stubs["update_exclusion"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateExclusion", request_serializer=logging_config.UpdateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, ) - return self._stubs['update_exclusion'] + return self._stubs["update_exclusion"] @property - def delete_exclusion(self) -> Callable[ - [logging_config.DeleteExclusionRequest], - Awaitable[empty_pb2.Empty]]: + def delete_exclusion( + self, + ) -> Callable[[logging_config.DeleteExclusionRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete exclusion method over gRPC. Deletes an exclusion in the \_Default sink. @@ -1083,18 +1135,20 @@ def delete_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_exclusion' not in self._stubs: - self._stubs['delete_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteExclusion', + if "delete_exclusion" not in self._stubs: + self._stubs["delete_exclusion"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteExclusion", request_serializer=logging_config.DeleteExclusionRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_exclusion'] + return self._stubs["delete_exclusion"] @property - def get_cmek_settings(self) -> Callable[ - [logging_config.GetCmekSettingsRequest], - Awaitable[logging_config.CmekSettings]]: + def get_cmek_settings( + self, + ) -> Callable[ + [logging_config.GetCmekSettingsRequest], Awaitable[logging_config.CmekSettings] + ]: r"""Return a callable for the get cmek settings method over gRPC. 
Gets the Logging CMEK settings for the given resource. @@ -1118,18 +1172,21 @@ def get_cmek_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_cmek_settings' not in self._stubs: - self._stubs['get_cmek_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetCmekSettings', + if "get_cmek_settings" not in self._stubs: + self._stubs["get_cmek_settings"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetCmekSettings", request_serializer=logging_config.GetCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, ) - return self._stubs['get_cmek_settings'] + return self._stubs["get_cmek_settings"] @property - def update_cmek_settings(self) -> Callable[ - [logging_config.UpdateCmekSettingsRequest], - Awaitable[logging_config.CmekSettings]]: + def update_cmek_settings( + self, + ) -> Callable[ + [logging_config.UpdateCmekSettingsRequest], + Awaitable[logging_config.CmekSettings], + ]: r"""Return a callable for the update cmek settings method over gRPC. Updates the Log Router CMEK settings for the given resource. @@ -1158,18 +1215,20 @@ def update_cmek_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'update_cmek_settings' not in self._stubs: - self._stubs['update_cmek_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateCmekSettings', + if "update_cmek_settings" not in self._stubs: + self._stubs["update_cmek_settings"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateCmekSettings", request_serializer=logging_config.UpdateCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, ) - return self._stubs['update_cmek_settings'] + return self._stubs["update_cmek_settings"] @property - def get_settings(self) -> Callable[ - [logging_config.GetSettingsRequest], - Awaitable[logging_config.Settings]]: + def get_settings( + self, + ) -> Callable[ + [logging_config.GetSettingsRequest], Awaitable[logging_config.Settings] + ]: r"""Return a callable for the get settings method over gRPC. Gets the Log Router settings for the given resource. @@ -1194,18 +1253,20 @@ def get_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_settings' not in self._stubs: - self._stubs['get_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetSettings', + if "get_settings" not in self._stubs: + self._stubs["get_settings"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetSettings", request_serializer=logging_config.GetSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, ) - return self._stubs['get_settings'] + return self._stubs["get_settings"] @property - def update_settings(self) -> Callable[ - [logging_config.UpdateSettingsRequest], - Awaitable[logging_config.Settings]]: + def update_settings( + self, + ) -> Callable[ + [logging_config.UpdateSettingsRequest], Awaitable[logging_config.Settings] + ]: r"""Return a callable for the update settings method over gRPC. 
Updates the Log Router settings for the given resource. @@ -1237,18 +1298,20 @@ def update_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_settings' not in self._stubs: - self._stubs['update_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateSettings', + if "update_settings" not in self._stubs: + self._stubs["update_settings"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateSettings", request_serializer=logging_config.UpdateSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, ) - return self._stubs['update_settings'] + return self._stubs["update_settings"] @property - def copy_log_entries(self) -> Callable[ - [logging_config.CopyLogEntriesRequest], - Awaitable[operations_pb2.Operation]]: + def copy_log_entries( + self, + ) -> Callable[ + [logging_config.CopyLogEntriesRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the copy log entries method over gRPC. Copies a set of log entries from a log bucket to a @@ -1264,16 +1327,16 @@ def copy_log_entries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'copy_log_entries' not in self._stubs: - self._stubs['copy_log_entries'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CopyLogEntries', + if "copy_log_entries" not in self._stubs: + self._stubs["copy_log_entries"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CopyLogEntries", request_serializer=logging_config.CopyLogEntriesRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['copy_log_entries'] + return self._stubs["copy_log_entries"] def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { self.list_buckets: self._wrap_method( self.list_buckets, @@ -1545,8 +1608,7 @@ def kind(self) -> str: def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -1563,8 +1625,7 @@ def cancel_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -1580,9 +1641,10 @@ def get_operation( @property def list_operations( self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. 
- """ + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -1596,6 +1658,4 @@ def list_operations( return self._stubs["list_operations"] -__all__ = ( - 'ConfigServiceV2GrpcAsyncIOTransport', -) +__all__ = ("ConfigServiceV2GrpcAsyncIOTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py index d9820f09067b..4bf59c378463 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .client import LoggingServiceV2Client from .async_client import LoggingServiceV2AsyncClient +from .client import LoggingServiceV2Client __all__ = ( - 'LoggingServiceV2Client', - 'LoggingServiceV2AsyncClient', + "LoggingServiceV2Client", + "LoggingServiceV2AsyncClient", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 075976b453a5..411196ea9ebb 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -14,43 +14,57 @@ # limitations under the License. # import logging as std_logging -from collections import OrderedDict import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, AsyncIterable, Awaitable, AsyncIterator, Sequence, Tuple, Type, Union - -from google.cloud.logging_v2 import gapic_version as package_version +from collections import OrderedDict +from typing import ( + AsyncIterable, + AsyncIterator, + Awaitable, + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) -from google.api_core.client_options import ClientOptions +import google.protobuf from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.logging_v2 import 
gapic_version as package_version +from google.oauth2 import service_account # type: ignore try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore -from google.cloud.logging_v2.services.logging_service_v2 import pagers -from google.cloud.logging_v2.types import log_entry -from google.cloud.logging_v2.types import logging -from google.longrunning import operations_pb2 # type: ignore import google.api.monitored_resource_pb2 as monitored_resource_pb2 # type: ignore -from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport +from google.cloud.logging_v2.services.logging_service_v2 import pagers +from google.cloud.logging_v2.types import log_entry, logging +from google.longrunning import operations_pb2 # type: ignore + from .client import LoggingServiceV2Client +from .transports.base import DEFAULT_CLIENT_INFO, LoggingServiceV2Transport +from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) + class LoggingServiceV2AsyncClient: """Service for ingesting and querying logs.""" @@ -65,16 +79,30 @@ class LoggingServiceV2AsyncClient: log_path = staticmethod(LoggingServiceV2Client.log_path) parse_log_path = staticmethod(LoggingServiceV2Client.parse_log_path) - common_billing_account_path = staticmethod(LoggingServiceV2Client.common_billing_account_path) - parse_common_billing_account_path = staticmethod(LoggingServiceV2Client.parse_common_billing_account_path) + common_billing_account_path = staticmethod( + LoggingServiceV2Client.common_billing_account_path + ) + parse_common_billing_account_path = 
staticmethod( + LoggingServiceV2Client.parse_common_billing_account_path + ) common_folder_path = staticmethod(LoggingServiceV2Client.common_folder_path) - parse_common_folder_path = staticmethod(LoggingServiceV2Client.parse_common_folder_path) - common_organization_path = staticmethod(LoggingServiceV2Client.common_organization_path) - parse_common_organization_path = staticmethod(LoggingServiceV2Client.parse_common_organization_path) + parse_common_folder_path = staticmethod( + LoggingServiceV2Client.parse_common_folder_path + ) + common_organization_path = staticmethod( + LoggingServiceV2Client.common_organization_path + ) + parse_common_organization_path = staticmethod( + LoggingServiceV2Client.parse_common_organization_path + ) common_project_path = staticmethod(LoggingServiceV2Client.common_project_path) - parse_common_project_path = staticmethod(LoggingServiceV2Client.parse_common_project_path) + parse_common_project_path = staticmethod( + LoggingServiceV2Client.parse_common_project_path + ) common_location_path = staticmethod(LoggingServiceV2Client.common_location_path) - parse_common_location_path = staticmethod(LoggingServiceV2Client.parse_common_location_path) + parse_common_location_path = staticmethod( + LoggingServiceV2Client.parse_common_location_path + ) @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): @@ -116,7 +144,9 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): """Return the API endpoint and client cert source for mutual TLS. 
The client cert source is determined in the following order: @@ -179,12 +209,18 @@ def universe_domain(self) -> str: get_transport_class = LoggingServiceV2Client.get_transport_class - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, LoggingServiceV2Transport, Callable[..., LoggingServiceV2Transport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, LoggingServiceV2Transport, Callable[..., LoggingServiceV2Transport] + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the logging service v2 async client. Args: @@ -239,31 +275,39 @@ def __init__(self, *, transport=transport, client_options=client_options, client_info=client_info, - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER _LOGGER.debug( "Created client `google.logging_v2.LoggingServiceV2AsyncClient`.", - extra = { + extra={ "serviceName": "google.logging.v2.LoggingServiceV2", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if 
hasattr(self._client._transport, "_credentials") + else { "serviceName": "google.logging.v2.LoggingServiceV2", "credentialsType": None, - } + }, ) - async def delete_log(self, - request: Optional[Union[logging.DeleteLogRequest, dict]] = None, - *, - log_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def delete_log( + self, + request: Optional[Union[logging.DeleteLogRequest, dict]] = None, + *, + log_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes all the log entries in a log for the \_Default Log Bucket. The log reappears if it receives new entries. Log entries written shortly before the delete operation might not be @@ -326,10 +370,14 @@ async def sample_delete_log(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [log_name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -343,14 +391,14 @@ async def sample_delete_log(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.delete_log] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_log + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("log_name", request.log_name), - )), + gapic_v1.routing_header.to_grpc_metadata((("log_name", request.log_name),)), ) # Validate the universe domain. @@ -364,17 +412,18 @@ async def sample_delete_log(): metadata=metadata, ) - async def write_log_entries(self, - request: Optional[Union[logging.WriteLogEntriesRequest, dict]] = None, - *, - log_name: Optional[str] = None, - resource: Optional[monitored_resource_pb2.MonitoredResource] = None, - labels: Optional[MutableMapping[str, str]] = None, - entries: Optional[MutableSequence[log_entry.LogEntry]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging.WriteLogEntriesResponse: + async def write_log_entries( + self, + request: Optional[Union[logging.WriteLogEntriesRequest, dict]] = None, + *, + log_name: Optional[str] = None, + resource: Optional[monitored_resource_pb2.MonitoredResource] = None, + labels: Optional[MutableMapping[str, str]] = None, + entries: Optional[MutableSequence[log_entry.LogEntry]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging.WriteLogEntriesResponse: r"""Writes log entries to Logging. This API method is the only way to send log entries to Logging. This method is used, directly or indirectly, by the Logging agent @@ -517,10 +566,14 @@ async def sample_write_log_entries(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [log_name, resource, labels, entries] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -541,7 +594,9 @@ async def sample_write_log_entries(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.write_log_entries] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.write_log_entries + ] # Validate the universe domain. self._client._validate_universe_domain() @@ -557,16 +612,17 @@ async def sample_write_log_entries(): # Done; return the response. 
return response - async def list_log_entries(self, - request: Optional[Union[logging.ListLogEntriesRequest, dict]] = None, - *, - resource_names: Optional[MutableSequence[str]] = None, - filter: Optional[str] = None, - order_by: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLogEntriesAsyncPager: + async def list_log_entries( + self, + request: Optional[Union[logging.ListLogEntriesRequest, dict]] = None, + *, + resource_names: Optional[MutableSequence[str]] = None, + filter: Optional[str] = None, + order_by: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLogEntriesAsyncPager: r"""Lists log entries. Use this method to retrieve log entries that originated from a project/folder/organization/billing account. For ways to export log entries, see `Exporting @@ -669,10 +725,14 @@ async def sample_list_log_entries(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [resource_names, filter, order_by] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -690,7 +750,9 @@ async def sample_list_log_entries(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_log_entries] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_log_entries + ] # Validate the universe domain. self._client._validate_universe_domain() @@ -717,13 +779,16 @@ async def sample_list_log_entries(): # Done; return the response. return response - async def list_monitored_resource_descriptors(self, - request: Optional[Union[logging.ListMonitoredResourceDescriptorsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListMonitoredResourceDescriptorsAsyncPager: + async def list_monitored_resource_descriptors( + self, + request: Optional[ + Union[logging.ListMonitoredResourceDescriptorsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListMonitoredResourceDescriptorsAsyncPager: r"""Lists the descriptors for monitored resource types used by Logging. @@ -782,7 +847,9 @@ async def sample_list_monitored_resource_descriptors(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_monitored_resource_descriptors] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_monitored_resource_descriptors + ] # Validate the universe domain. self._client._validate_universe_domain() @@ -809,14 +876,15 @@ async def sample_list_monitored_resource_descriptors(): # Done; return the response. 
return response - async def list_logs(self, - request: Optional[Union[logging.ListLogsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLogsAsyncPager: + async def list_logs( + self, + request: Optional[Union[logging.ListLogsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLogsAsyncPager: r"""Lists the logs in projects, organizations, folders, or billing accounts. Only logs that have entries are listed. @@ -883,10 +951,14 @@ async def sample_list_logs(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -900,14 +972,14 @@ async def sample_list_logs(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.list_logs] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_logs + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -935,13 +1007,14 @@ async def sample_list_logs(): # Done; return the response. return response - def tail_log_entries(self, - requests: Optional[AsyncIterator[logging.TailLogEntriesRequest]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> Awaitable[AsyncIterable[logging.TailLogEntriesResponse]]: + def tail_log_entries( + self, + requests: Optional[AsyncIterator[logging.TailLogEntriesRequest]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> Awaitable[AsyncIterable[logging.TailLogEntriesResponse]]: r"""Streaming read of log entries as they are ingested. Until the stream is terminated, it will continue reading logs. @@ -1001,7 +1074,9 @@ def request_generator(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.tail_log_entries] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.tail_log_entries + ] # Validate the universe domain. self._client._validate_universe_domain() @@ -1059,8 +1134,7 @@ async def list_operations( # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -1068,7 +1142,11 @@ async def list_operations( # Send the request. response = await rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1115,8 +1193,7 @@ async def get_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -1124,7 +1201,11 @@ async def get_operation( # Send the request. response = await rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1174,15 +1255,19 @@ async def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. 
- await rpc(request_pb, retry=retry, timeout=timeout, metadata=metadata,) + await rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) async def __aenter__(self) -> "LoggingServiceV2AsyncClient": return self @@ -1190,12 +1275,13 @@ async def __aenter__(self) -> "LoggingServiceV2AsyncClient": async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "LoggingServiceV2AsyncClient", -) +__all__ = ("LoggingServiceV2AsyncClient",) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index ea794ebe3670..1343d497918a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -13,27 +13,40 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from collections import OrderedDict -from http import HTTPStatus import json import logging as std_logging import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union, cast import warnings +from collections import OrderedDict +from http import HTTPStatus +from typing import ( + Callable, + Dict, + Iterable, + Iterator, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) -from google.cloud.logging_v2 import gapic_version as package_version - +import google.protobuf from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.logging_v2 import gapic_version as package_version +from google.oauth2 import service_account # type: ignore try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -42,18 +55,19 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) -from google.cloud.logging_v2.services.logging_service_v2 import pagers -from 
google.cloud.logging_v2.types import log_entry -from google.cloud.logging_v2.types import logging -from google.longrunning import operations_pb2 # type: ignore import google.api.monitored_resource_pb2 as monitored_resource_pb2 # type: ignore -from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO +from google.cloud.logging_v2.services.logging_service_v2 import pagers +from google.cloud.logging_v2.types import log_entry, logging +from google.longrunning import operations_pb2 # type: ignore + +from .transports.base import DEFAULT_CLIENT_INFO, LoggingServiceV2Transport from .transports.grpc import LoggingServiceV2GrpcTransport from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport @@ -65,13 +79,15 @@ class LoggingServiceV2ClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. """ + _transport_registry = OrderedDict() # type: Dict[str, Type[LoggingServiceV2Transport]] _transport_registry["grpc"] = LoggingServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = LoggingServiceV2GrpcAsyncIOTransport - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[LoggingServiceV2Transport]: + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[LoggingServiceV2Transport]: """Returns an appropriate transport class. Args: @@ -147,14 +163,16 @@ def _use_client_cert_effective(): bool: whether client certificate should be used for mTLS Raises: ValueError: (If using a version of google-auth without should_use_client_cert and - GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) 
""" # check if google-auth version supports should_use_client_cert for automatic mTLS enablement if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER return mtls.should_use_client_cert() - else: # pragma: NO COVER + else: # pragma: NO COVER # if unsupported, fallback to reading from env var - use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert_str = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() if use_client_cert_str not in ("true", "false"): raise ValueError( "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" @@ -193,8 +211,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: LoggingServiceV2Client: The constructed client. """ - credentials = service_account.Credentials.from_service_account_file( - filename) + credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials return cls(*args, **kwargs) @@ -211,73 +228,103 @@ def transport(self) -> LoggingServiceV2Transport: return self._transport @staticmethod - def log_path(project: str,log: str,) -> str: + def log_path( + project: str, + log: str, + ) -> str: """Returns a fully-qualified log string.""" - return "projects/{project}/logs/{log}".format(project=project, log=log, ) + return "projects/{project}/logs/{log}".format( + project=project, + log=log, + ) @staticmethod - def parse_log_path(path: str) -> Dict[str,str]: + def parse_log_path(path: str) -> Dict[str, str]: """Parses a log path into its component segments.""" m = re.match(r"^projects/(?P.+?)/logs/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + return 
"billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: + def parse_common_billing_account_path(path: str) -> Dict[str, str]: """Parse a billing_account path into its component segments.""" m = re.match(r"^billingAccounts/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str, ) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: + def parse_common_folder_path(path: str) -> Dict[str, str]: """Parse a folder path into its component segments.""" m = re.match(r"^folders/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str, ) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: + def parse_common_organization_path(path: str) -> Dict[str, str]: """Parse a organization path into its component segments.""" m = re.match(r"^organizations/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str, ) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) + return "projects/{project}".format( + project=project, + ) @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: + def parse_common_project_path(path: str) -> Dict[str, str]: """Parse a project 
path into its component segments.""" m = re.match(r"^projects/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str, ) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: + def parse_common_location_path(path: str) -> Dict[str, str]: """Parse a location path into its component segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -309,14 +356,18 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = LoggingServiceV2Client._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Figure out the client cert source to use. client_cert_source = None @@ -329,7 +380,9 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio # Figure out which api endpoint to use. if client_options.api_endpoint is not None: api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): api_endpoint = cls.DEFAULT_MTLS_ENDPOINT else: api_endpoint = cls.DEFAULT_ENDPOINT @@ -354,7 +407,9 @@ def _read_environment_variables(): use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) return use_client_cert, use_mtls_endpoint, universe_domain_env @staticmethod @@ -377,7 +432,9 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): return client_cert_source @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint) -> str: + def _get_api_endpoint( + 
api_override, client_cert_source, universe_domain, use_mtls_endpoint + ) -> str: """Return the API endpoint used by the client. Args: @@ -393,17 +450,25 @@ def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtl """ if api_override is not None: api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): _default_universe = LoggingServiceV2Client._DEFAULT_UNIVERSE if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) api_endpoint = LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + api_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) return api_endpoint @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: """Return the universe domain used by the client. Args: @@ -439,15 +504,18 @@ def _validate_universe_domain(self): return True def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError + self, error: core_exceptions.GoogleAPICallError ) -> None: """Adds credential info string to error details for 401/403/404 errors. Args: error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
""" - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: return cred = self._transport._credentials @@ -480,12 +548,18 @@ def universe_domain(self) -> str: """ return self._universe_domain - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, LoggingServiceV2Transport, Callable[..., LoggingServiceV2Transport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, LoggingServiceV2Transport, Callable[..., LoggingServiceV2Transport] + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the logging service v2 client. 
Args: @@ -540,13 +614,21 @@ def __init__(self, *, self._client_options = client_options_lib.from_dict(self._client_options) if self._client_options is None: self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = LoggingServiceV2Client._read_environment_variables() - self._client_cert_source = LoggingServiceV2Client._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = LoggingServiceV2Client._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ( + LoggingServiceV2Client._read_environment_variables() + ) + self._client_cert_source = LoggingServiceV2Client._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = LoggingServiceV2Client._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) self._api_endpoint: str = "" # updated below, depending on `transport` # Initialize the universe domain validation. @@ -558,7 +640,9 @@ def __init__(self, *, api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. 
# Ordinarily, we provide the transport, but allowing a custom transport @@ -567,30 +651,41 @@ def __init__(self, *, if transport_provided: # transport is a LoggingServiceV2Transport instance. if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) if self._client_options.scopes: raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." + "When providing a transport instance, provide its scopes directly." ) self._transport = cast(LoggingServiceV2Transport, transport) self._api_endpoint = self._transport.host - self._api_endpoint = (self._api_endpoint or - LoggingServiceV2Client._get_api_endpoint( + self._api_endpoint = ( + self._api_endpoint + or LoggingServiceV2Client._get_api_endpoint( self._client_options.api_endpoint, self._client_cert_source, self._universe_domain, - self._use_mtls_endpoint)) + self._use_mtls_endpoint, + ) + ) if not transport_provided: import google.auth._default # type: ignore - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) - transport_init: Union[Type[LoggingServiceV2Transport], Callable[..., LoggingServiceV2Transport]] = ( + transport_init: Union[ + Type[LoggingServiceV2Transport], + Callable[..., LoggingServiceV2Transport], + ] = ( LoggingServiceV2Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., LoggingServiceV2Transport], transport) @@ -609,28 +704,37 @@ def __init__(self, *, ) if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER _LOGGER.debug( "Created client `google.logging_v2.LoggingServiceV2Client`.", - extra = { + extra={ "serviceName": "google.logging.v2.LoggingServiceV2", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { "serviceName": "google.logging.v2.LoggingServiceV2", "credentialsType": None, - } + }, ) - def delete_log(self, - request: Optional[Union[logging.DeleteLogRequest, dict]] = None, - *, - log_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def delete_log( + self, + request: Optional[Union[logging.DeleteLogRequest, dict]] = None, + *, + log_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes all the log entries in a log for the \_Default Log Bucket. The log reappears if it receives new entries. Log entries written shortly before the delete operation might not be @@ -693,10 +797,14 @@ def sample_delete_log(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [log_name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -714,9 +822,7 @@ def sample_delete_log(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("log_name", request.log_name), - )), + gapic_v1.routing_header.to_grpc_metadata((("log_name", request.log_name),)), ) # Validate the universe domain. 
@@ -730,17 +836,18 @@ def sample_delete_log(): metadata=metadata, ) - def write_log_entries(self, - request: Optional[Union[logging.WriteLogEntriesRequest, dict]] = None, - *, - log_name: Optional[str] = None, - resource: Optional[monitored_resource_pb2.MonitoredResource] = None, - labels: Optional[MutableMapping[str, str]] = None, - entries: Optional[MutableSequence[log_entry.LogEntry]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging.WriteLogEntriesResponse: + def write_log_entries( + self, + request: Optional[Union[logging.WriteLogEntriesRequest, dict]] = None, + *, + log_name: Optional[str] = None, + resource: Optional[monitored_resource_pb2.MonitoredResource] = None, + labels: Optional[MutableMapping[str, str]] = None, + entries: Optional[MutableSequence[log_entry.LogEntry]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging.WriteLogEntriesResponse: r"""Writes log entries to Logging. This API method is the only way to send log entries to Logging. This method is used, directly or indirectly, by the Logging agent @@ -883,10 +990,14 @@ def sample_write_log_entries(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [log_name, resource, labels, entries] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -921,16 +1032,17 @@ def sample_write_log_entries(): # Done; return the response. return response - def list_log_entries(self, - request: Optional[Union[logging.ListLogEntriesRequest, dict]] = None, - *, - resource_names: Optional[MutableSequence[str]] = None, - filter: Optional[str] = None, - order_by: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLogEntriesPager: + def list_log_entries( + self, + request: Optional[Union[logging.ListLogEntriesRequest, dict]] = None, + *, + resource_names: Optional[MutableSequence[str]] = None, + filter: Optional[str] = None, + order_by: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLogEntriesPager: r"""Lists log entries. Use this method to retrieve log entries that originated from a project/folder/organization/billing account. For ways to export log entries, see `Exporting @@ -1033,10 +1145,14 @@ def sample_list_log_entries(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [resource_names, filter, order_by] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1080,13 +1196,16 @@ def sample_list_log_entries(): # Done; return the response. return response - def list_monitored_resource_descriptors(self, - request: Optional[Union[logging.ListMonitoredResourceDescriptorsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListMonitoredResourceDescriptorsPager: + def list_monitored_resource_descriptors( + self, + request: Optional[ + Union[logging.ListMonitoredResourceDescriptorsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListMonitoredResourceDescriptorsPager: r"""Lists the descriptors for monitored resource types used by Logging. @@ -1145,7 +1264,9 @@ def sample_list_monitored_resource_descriptors(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_monitored_resource_descriptors] + rpc = self._transport._wrapped_methods[ + self._transport.list_monitored_resource_descriptors + ] # Validate the universe domain. 
self._validate_universe_domain() @@ -1172,14 +1293,15 @@ def sample_list_monitored_resource_descriptors(): # Done; return the response. return response - def list_logs(self, - request: Optional[Union[logging.ListLogsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLogsPager: + def list_logs( + self, + request: Optional[Union[logging.ListLogsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLogsPager: r"""Lists the logs in projects, organizations, folders, or billing accounts. Only logs that have entries are listed. @@ -1246,10 +1368,14 @@ def sample_list_logs(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1267,9 +1393,7 @@ def sample_list_logs(): # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1297,13 +1421,14 @@ def sample_list_logs(): # Done; return the response. return response - def tail_log_entries(self, - requests: Optional[Iterator[logging.TailLogEntriesRequest]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> Iterable[logging.TailLogEntriesResponse]: + def tail_log_entries( + self, + requests: Optional[Iterator[logging.TailLogEntriesRequest]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> Iterable[logging.TailLogEntriesResponse]: r"""Streaming read of log entries as they are ingested. Until the stream is terminated, it will continue reading logs. @@ -1434,8 +1559,7 @@ def list_operations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -1444,7 +1568,11 @@ def list_operations( try: # Send the request. response = rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1494,8 +1622,7 @@ def get_operation( # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -1504,7 +1631,11 @@ def get_operation( try: # Send the request. response = rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1557,27 +1688,26 @@ def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc(request_pb, retry=retry, timeout=timeout, metadata=metadata,) - - - - - + rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "LoggingServiceV2Client", -) +__all__ = ("LoggingServiceV2Client",) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py index 84028a18c37b..10d84af1bab6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -13,20 +13,33 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + from google.api_core import gapic_v1 from google.api_core import retry as retries from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.cloud.logging_v2.types import log_entry -from google.cloud.logging_v2.types import logging import google.api.monitored_resource_pb2 as monitored_resource_pb2 # type: ignore +from google.cloud.logging_v2.types import log_entry, logging class ListLogEntriesPager: @@ -46,14 +59,17 @@ class ListLogEntriesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., logging.ListLogEntriesResponse], - request: logging.ListLogEntriesRequest, - response: logging.ListLogEntriesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging.ListLogEntriesResponse], + request: logging.ListLogEntriesRequest, + response: logging.ListLogEntriesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. Args: @@ -86,7 +102,12 @@ def pages(self) -> Iterator[logging.ListLogEntriesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[log_entry.LogEntry]: @@ -94,7 +115,7 @@ def __iter__(self) -> Iterator[log_entry.LogEntry]: yield from page.entries def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListLogEntriesAsyncPager: @@ -114,14 +135,17 @@ class ListLogEntriesAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[logging.ListLogEntriesResponse]], - request: logging.ListLogEntriesRequest, - response: logging.ListLogEntriesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging.ListLogEntriesResponse]], + request: logging.ListLogEntriesRequest, + response: logging.ListLogEntriesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. Args: @@ -154,8 +178,14 @@ async def pages(self) -> AsyncIterator[logging.ListLogEntriesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[log_entry.LogEntry]: async def async_generator(): async for page in self.pages: @@ -165,7 +195,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListMonitoredResourceDescriptorsPager: @@ -185,14 +215,17 @@ class ListMonitoredResourceDescriptorsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., logging.ListMonitoredResourceDescriptorsResponse], - request: logging.ListMonitoredResourceDescriptorsRequest, - response: logging.ListMonitoredResourceDescriptorsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging.ListMonitoredResourceDescriptorsResponse], + request: logging.ListMonitoredResourceDescriptorsRequest, + response: logging.ListMonitoredResourceDescriptorsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. Args: @@ -225,7 +258,12 @@ def pages(self) -> Iterator[logging.ListMonitoredResourceDescriptorsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[monitored_resource_pb2.MonitoredResourceDescriptor]: @@ -233,7 +271,7 @@ def __iter__(self) -> Iterator[monitored_resource_pb2.MonitoredResourceDescripto yield from page.resource_descriptors def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListMonitoredResourceDescriptorsAsyncPager: @@ -253,14 +291,19 @@ class ListMonitoredResourceDescriptorsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[logging.ListMonitoredResourceDescriptorsResponse]], - request: logging.ListMonitoredResourceDescriptorsRequest, - response: logging.ListMonitoredResourceDescriptorsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[ + ..., Awaitable[logging.ListMonitoredResourceDescriptorsResponse] + ], + request: logging.ListMonitoredResourceDescriptorsRequest, + response: logging.ListMonitoredResourceDescriptorsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. Args: @@ -289,13 +332,23 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterator[logging.ListMonitoredResourceDescriptorsResponse]: + async def pages( + self, + ) -> AsyncIterator[logging.ListMonitoredResourceDescriptorsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response - def __aiter__(self) -> AsyncIterator[monitored_resource_pb2.MonitoredResourceDescriptor]: + + def __aiter__( + self, + ) -> AsyncIterator[monitored_resource_pb2.MonitoredResourceDescriptor]: async def async_generator(): async for page in self.pages: for response in page.resource_descriptors: @@ -304,7 +357,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return 
'{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListLogsPager: @@ -324,14 +377,17 @@ class ListLogsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., logging.ListLogsResponse], - request: logging.ListLogsRequest, - response: logging.ListLogsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging.ListLogsResponse], + request: logging.ListLogsRequest, + response: logging.ListLogsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. Args: @@ -364,7 +420,12 @@ def pages(self) -> Iterator[logging.ListLogsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[str]: @@ -372,7 +433,7 @@ def __iter__(self) -> Iterator[str]: yield from page.log_names def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListLogsAsyncPager: @@ -392,14 +453,17 @@ class ListLogsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[logging.ListLogsResponse]], - request: logging.ListLogsRequest, - response: logging.ListLogsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging.ListLogsResponse]], + request: logging.ListLogsRequest, + response: logging.ListLogsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. Args: @@ -432,8 +496,14 @@ async def pages(self) -> AsyncIterator[logging.ListLogsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[str]: async def async_generator(): async for page in self.pages: @@ -443,4 +513,4 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py index 25058513ec9e..880f7e966313 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py @@ -20,14 +20,13 @@ from .grpc import LoggingServiceV2GrpcTransport from .grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport - # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[LoggingServiceV2Transport]] -_transport_registry['grpc'] = LoggingServiceV2GrpcTransport -_transport_registry['grpc_asyncio'] = LoggingServiceV2GrpcAsyncIOTransport +_transport_registry["grpc"] = LoggingServiceV2GrpcTransport +_transport_registry["grpc_asyncio"] = LoggingServiceV2GrpcAsyncIOTransport __all__ = ( - 'LoggingServiceV2Transport', - 'LoggingServiceV2GrpcTransport', - 'LoggingServiceV2GrpcAsyncIOTransport', + "LoggingServiceV2Transport", + "LoggingServiceV2GrpcTransport", + "LoggingServiceV2GrpcAsyncIOTransport", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 3de330db3029..5e447e3011cf 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -16,22 +16,22 @@ import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -from google.cloud.logging_v2 import gapic_version as package_version - -import google.auth # type: ignore import google.api_core +import google.auth # type: ignore +import google.protobuf +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore from google.api_core 
import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - +from google.cloud.logging_v2 import gapic_version as package_version from google.cloud.logging_v2.types import logging -from google.longrunning import operations_pb2 # type: ignore -import google.protobuf.empty_pb2 as empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ @@ -41,27 +41,28 @@ class LoggingServiceV2Transport(abc.ABC): """Abstract transport class for LoggingServiceV2.""" AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", ) - DEFAULT_HOST: str = 'logging.googleapis.com' + DEFAULT_HOST: str = "logging.googleapis.com" def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - 
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: """Instantiate the transport. Args: @@ -100,31 +101,43 @@ def __init__( # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) if credentials_file is not None: credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - default_scopes=self.AUTH_SCOPES, - ) + credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(scopes=scopes, quota_project_id=quota_project_id, default_scopes=self.AUTH_SCOPES) + credentials, _ = google.auth.default( + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) # Don't apply audience if the credentials file passed from user. 
if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): credentials = credentials.with_always_use_jwt_access(True) # Save the credentials. self._credentials = credentials # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' + if ":" not in host: + host += ":443" self._host = host self._wrapped_methods: Dict[Callable, Callable] = {} @@ -247,69 +260,77 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - } + } def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
""" raise NotImplementedError() @property - def delete_log(self) -> Callable[ - [logging.DeleteLogRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def delete_log( + self, + ) -> Callable[ + [logging.DeleteLogRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] + ]: raise NotImplementedError() @property - def write_log_entries(self) -> Callable[ - [logging.WriteLogEntriesRequest], - Union[ - logging.WriteLogEntriesResponse, - Awaitable[logging.WriteLogEntriesResponse] - ]]: + def write_log_entries( + self, + ) -> Callable[ + [logging.WriteLogEntriesRequest], + Union[ + logging.WriteLogEntriesResponse, Awaitable[logging.WriteLogEntriesResponse] + ], + ]: raise NotImplementedError() @property - def list_log_entries(self) -> Callable[ - [logging.ListLogEntriesRequest], - Union[ - logging.ListLogEntriesResponse, - Awaitable[logging.ListLogEntriesResponse] - ]]: + def list_log_entries( + self, + ) -> Callable[ + [logging.ListLogEntriesRequest], + Union[ + logging.ListLogEntriesResponse, Awaitable[logging.ListLogEntriesResponse] + ], + ]: raise NotImplementedError() @property - def list_monitored_resource_descriptors(self) -> Callable[ - [logging.ListMonitoredResourceDescriptorsRequest], - Union[ - logging.ListMonitoredResourceDescriptorsResponse, - Awaitable[logging.ListMonitoredResourceDescriptorsResponse] - ]]: + def list_monitored_resource_descriptors( + self, + ) -> Callable[ + [logging.ListMonitoredResourceDescriptorsRequest], + Union[ + logging.ListMonitoredResourceDescriptorsResponse, + Awaitable[logging.ListMonitoredResourceDescriptorsResponse], + ], + ]: raise NotImplementedError() @property - def list_logs(self) -> Callable[ - [logging.ListLogsRequest], - Union[ - logging.ListLogsResponse, - Awaitable[logging.ListLogsResponse] - ]]: + def list_logs( + self, + ) -> Callable[ + [logging.ListLogsRequest], + Union[logging.ListLogsResponse, Awaitable[logging.ListLogsResponse]], + ]: raise NotImplementedError() @property - def 
tail_log_entries(self) -> Callable[ - [logging.TailLogEntriesRequest], - Union[ - logging.TailLogEntriesResponse, - Awaitable[logging.TailLogEntriesResponse] - ]]: + def tail_log_entries( + self, + ) -> Callable[ + [logging.TailLogEntriesRequest], + Union[ + logging.TailLogEntriesResponse, Awaitable[logging.TailLogEntriesResponse] + ], + ]: raise NotImplementedError() @property @@ -317,7 +338,10 @@ def list_operations( self, ) -> Callable[ [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], ]: raise NotImplementedError() @@ -344,6 +368,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ( - 'LoggingServiceV2Transport', -) +__all__ = ("LoggingServiceV2Transport",) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index a255116e4f26..10cef76171f6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -19,24 +19,23 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson +import google.auth # type: ignore +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore import 
google.protobuf.message - import grpc # type: ignore import proto # type: ignore - +from google.api_core import gapic_v1, grpc_helpers +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore from google.cloud.logging_v2.types import logging -from google.longrunning import operations_pb2 # type: ignore -import google.protobuf.empty_pb2 as empty_pb2 # type: ignore -from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson + +from .base import DEFAULT_CLIENT_INFO, LoggingServiceV2Transport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -46,7 +45,9 @@ class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) if logging_enabled: # pragma: NO COVER request_metadata = client_call_details.metadata if isinstance(request, proto.Message): @@ -67,7 +68,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.logging.v2.LoggingServiceV2", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -78,7 +79,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if 
response_metadata else None + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) result = response.result() if isinstance(result, proto.Message): response_payload = type(result).to_json(result) @@ -93,7 +98,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Received response for {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.logging.v2.LoggingServiceV2", "rpcName": client_call_details.method, "response": grpc_response, @@ -115,23 +120,26 @@ class LoggingServiceV2GrpcTransport(LoggingServiceV2Transport): It sends protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. """ + _stubs: Dict[str, Callable] - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + 
ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: @@ -258,19 +266,23 @@ def __init__(self, *, ) self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod - def create_channel(cls, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: + def create_channel( + cls, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: """Create and return a gRPC channel object. Args: host (Optional[str]): The host for the channel to use. @@ -306,19 +318,16 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. 
- """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property - def delete_log(self) -> Callable[ - [logging.DeleteLogRequest], - empty_pb2.Empty]: + def delete_log(self) -> Callable[[logging.DeleteLogRequest], empty_pb2.Empty]: r"""Return a callable for the delete log method over gRPC. Deletes all the log entries in a log for the \_Default Log @@ -337,18 +346,18 @@ def delete_log(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_log' not in self._stubs: - self._stubs['delete_log'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/DeleteLog', + if "delete_log" not in self._stubs: + self._stubs["delete_log"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/DeleteLog", request_serializer=logging.DeleteLogRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_log'] + return self._stubs["delete_log"] @property - def write_log_entries(self) -> Callable[ - [logging.WriteLogEntriesRequest], - logging.WriteLogEntriesResponse]: + def write_log_entries( + self, + ) -> Callable[[logging.WriteLogEntriesRequest], logging.WriteLogEntriesResponse]: r"""Return a callable for the write log entries method over gRPC. Writes log entries to Logging. This API method is the @@ -369,18 +378,18 @@ def write_log_entries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'write_log_entries' not in self._stubs: - self._stubs['write_log_entries'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/WriteLogEntries', + if "write_log_entries" not in self._stubs: + self._stubs["write_log_entries"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/WriteLogEntries", request_serializer=logging.WriteLogEntriesRequest.serialize, response_deserializer=logging.WriteLogEntriesResponse.deserialize, ) - return self._stubs['write_log_entries'] + return self._stubs["write_log_entries"] @property - def list_log_entries(self) -> Callable[ - [logging.ListLogEntriesRequest], - logging.ListLogEntriesResponse]: + def list_log_entries( + self, + ) -> Callable[[logging.ListLogEntriesRequest], logging.ListLogEntriesResponse]: r"""Return a callable for the list log entries method over gRPC. Lists log entries. Use this method to retrieve log entries that @@ -398,18 +407,21 @@ def list_log_entries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_log_entries' not in self._stubs: - self._stubs['list_log_entries'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListLogEntries', + if "list_log_entries" not in self._stubs: + self._stubs["list_log_entries"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListLogEntries", request_serializer=logging.ListLogEntriesRequest.serialize, response_deserializer=logging.ListLogEntriesResponse.deserialize, ) - return self._stubs['list_log_entries'] + return self._stubs["list_log_entries"] @property - def list_monitored_resource_descriptors(self) -> Callable[ - [logging.ListMonitoredResourceDescriptorsRequest], - logging.ListMonitoredResourceDescriptorsResponse]: + def list_monitored_resource_descriptors( + self, + ) -> Callable[ + [logging.ListMonitoredResourceDescriptorsRequest], + logging.ListMonitoredResourceDescriptorsResponse, + ]: r"""Return a callable for the list monitored resource descriptors method over gRPC. @@ -426,18 +438,20 @@ def list_monitored_resource_descriptors(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_monitored_resource_descriptors' not in self._stubs: - self._stubs['list_monitored_resource_descriptors'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors', - request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize, - response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize, + if "list_monitored_resource_descriptors" not in self._stubs: + self._stubs["list_monitored_resource_descriptors"] = ( + self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors", + request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize, + response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize, + ) ) - return self._stubs['list_monitored_resource_descriptors'] + return self._stubs["list_monitored_resource_descriptors"] @property - def list_logs(self) -> Callable[ - [logging.ListLogsRequest], - logging.ListLogsResponse]: + def list_logs( + self, + ) -> Callable[[logging.ListLogsRequest], logging.ListLogsResponse]: r"""Return a callable for the list logs method over gRPC. Lists the logs in projects, organizations, folders, @@ -454,18 +468,18 @@ def list_logs(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_logs' not in self._stubs: - self._stubs['list_logs'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListLogs', + if "list_logs" not in self._stubs: + self._stubs["list_logs"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListLogs", request_serializer=logging.ListLogsRequest.serialize, response_deserializer=logging.ListLogsResponse.deserialize, ) - return self._stubs['list_logs'] + return self._stubs["list_logs"] @property - def tail_log_entries(self) -> Callable[ - [logging.TailLogEntriesRequest], - logging.TailLogEntriesResponse]: + def tail_log_entries( + self, + ) -> Callable[[logging.TailLogEntriesRequest], logging.TailLogEntriesResponse]: r"""Return a callable for the tail log entries method over gRPC. Streaming read of log entries as they are ingested. @@ -482,13 +496,13 @@ def tail_log_entries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'tail_log_entries' not in self._stubs: - self._stubs['tail_log_entries'] = self._logged_channel.stream_stream( - '/google.logging.v2.LoggingServiceV2/TailLogEntries', + if "tail_log_entries" not in self._stubs: + self._stubs["tail_log_entries"] = self._logged_channel.stream_stream( + "/google.logging.v2.LoggingServiceV2/TailLogEntries", request_serializer=logging.TailLogEntriesRequest.serialize, response_deserializer=logging.TailLogEntriesResponse.deserialize, ) - return self._stubs['tail_log_entries'] + return self._stubs["tail_log_entries"] def close(self): self._logged_channel.close() @@ -497,8 +511,7 @@ def close(self): def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
# gRPC handles serialization and deserialization, so we just need @@ -515,8 +528,7 @@ def cancel_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -532,9 +544,10 @@ def get_operation( @property def list_operations( self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -552,6 +565,4 @@ def kind(self) -> str: return "grpc" -__all__ = ( - 'LoggingServiceV2GrpcTransport', -) +__all__ = ("LoggingServiceV2GrpcTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 50ae005fe396..474407c56bd1 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -15,32 +15,31 @@ # import inspect import json -import pickle import logging as std_logging +import pickle import warnings from typing import Awaitable, Callable, 
Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.logging_v2.types import logging +from google.longrunning import operations_pb2 # type: ignore from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore from grpc.experimental import aio # type: ignore -from google.cloud.logging_v2.types import logging -from google.longrunning import operations_pb2 # type: ignore -import google.protobuf.empty_pb2 as empty_pb2 # type: ignore -from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO +from .base import DEFAULT_CLIENT_INFO, LoggingServiceV2Transport from .grpc import LoggingServiceV2GrpcTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -48,9 +47,13 @@ _LOGGER = std_logging.getLogger(__name__) -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( 
+ std_logging.DEBUG + ) if logging_enabled: # pragma: NO COVER request_metadata = client_call_details.metadata if isinstance(request, proto.Message): @@ -71,7 +74,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.logging.v2.LoggingServiceV2", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -82,7 +85,11 @@ async def intercept_unary_unary(self, continuation, client_call_details, request if logging_enabled: # pragma: NO COVER response_metadata = await response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) result = await response if isinstance(result, proto.Message): response_payload = type(result).to_json(result) @@ -97,7 +104,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Received response to rpc {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.logging.v2.LoggingServiceV2", "rpcName": str(client_call_details.method), "response": grpc_response, @@ -124,13 +131,15 @@ class LoggingServiceV2GrpcAsyncIOTransport(LoggingServiceV2Transport): _stubs: Dict[str, Callable] = {} @classmethod - def create_channel(cls, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: + def create_channel( + cls, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] 
= None, + **kwargs, + ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: host (Optional[str]): The host for the channel to use. @@ -161,24 +170,26 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. 
Args: @@ -307,7 +318,9 @@ def __init__(self, *, self._interceptor = _LoggingClientAIOInterceptor() self._grpc_channel._unary_unary_interceptors.append(self._interceptor) self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @@ -322,9 +335,9 @@ def grpc_channel(self) -> aio.Channel: return self._grpc_channel @property - def delete_log(self) -> Callable[ - [logging.DeleteLogRequest], - Awaitable[empty_pb2.Empty]]: + def delete_log( + self, + ) -> Callable[[logging.DeleteLogRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete log method over gRPC. Deletes all the log entries in a log for the \_Default Log @@ -343,18 +356,20 @@ def delete_log(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_log' not in self._stubs: - self._stubs['delete_log'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/DeleteLog', + if "delete_log" not in self._stubs: + self._stubs["delete_log"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/DeleteLog", request_serializer=logging.DeleteLogRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_log'] + return self._stubs["delete_log"] @property - def write_log_entries(self) -> Callable[ - [logging.WriteLogEntriesRequest], - Awaitable[logging.WriteLogEntriesResponse]]: + def write_log_entries( + self, + ) -> Callable[ + [logging.WriteLogEntriesRequest], Awaitable[logging.WriteLogEntriesResponse] + ]: r"""Return a callable for the write log entries method over gRPC. Writes log entries to Logging. 
This API method is the @@ -375,18 +390,20 @@ def write_log_entries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'write_log_entries' not in self._stubs: - self._stubs['write_log_entries'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/WriteLogEntries', + if "write_log_entries" not in self._stubs: + self._stubs["write_log_entries"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/WriteLogEntries", request_serializer=logging.WriteLogEntriesRequest.serialize, response_deserializer=logging.WriteLogEntriesResponse.deserialize, ) - return self._stubs['write_log_entries'] + return self._stubs["write_log_entries"] @property - def list_log_entries(self) -> Callable[ - [logging.ListLogEntriesRequest], - Awaitable[logging.ListLogEntriesResponse]]: + def list_log_entries( + self, + ) -> Callable[ + [logging.ListLogEntriesRequest], Awaitable[logging.ListLogEntriesResponse] + ]: r"""Return a callable for the list log entries method over gRPC. Lists log entries. Use this method to retrieve log entries that @@ -404,18 +421,21 @@ def list_log_entries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_log_entries' not in self._stubs: - self._stubs['list_log_entries'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListLogEntries', + if "list_log_entries" not in self._stubs: + self._stubs["list_log_entries"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListLogEntries", request_serializer=logging.ListLogEntriesRequest.serialize, response_deserializer=logging.ListLogEntriesResponse.deserialize, ) - return self._stubs['list_log_entries'] + return self._stubs["list_log_entries"] @property - def list_monitored_resource_descriptors(self) -> Callable[ - [logging.ListMonitoredResourceDescriptorsRequest], - Awaitable[logging.ListMonitoredResourceDescriptorsResponse]]: + def list_monitored_resource_descriptors( + self, + ) -> Callable[ + [logging.ListMonitoredResourceDescriptorsRequest], + Awaitable[logging.ListMonitoredResourceDescriptorsResponse], + ]: r"""Return a callable for the list monitored resource descriptors method over gRPC. @@ -432,18 +452,20 @@ def list_monitored_resource_descriptors(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_monitored_resource_descriptors' not in self._stubs: - self._stubs['list_monitored_resource_descriptors'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors', - request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize, - response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize, + if "list_monitored_resource_descriptors" not in self._stubs: + self._stubs["list_monitored_resource_descriptors"] = ( + self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors", + request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize, + response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize, + ) ) - return self._stubs['list_monitored_resource_descriptors'] + return self._stubs["list_monitored_resource_descriptors"] @property - def list_logs(self) -> Callable[ - [logging.ListLogsRequest], - Awaitable[logging.ListLogsResponse]]: + def list_logs( + self, + ) -> Callable[[logging.ListLogsRequest], Awaitable[logging.ListLogsResponse]]: r"""Return a callable for the list logs method over gRPC. Lists the logs in projects, organizations, folders, @@ -460,18 +482,20 @@ def list_logs(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_logs' not in self._stubs: - self._stubs['list_logs'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListLogs', + if "list_logs" not in self._stubs: + self._stubs["list_logs"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListLogs", request_serializer=logging.ListLogsRequest.serialize, response_deserializer=logging.ListLogsResponse.deserialize, ) - return self._stubs['list_logs'] + return self._stubs["list_logs"] @property - def tail_log_entries(self) -> Callable[ - [logging.TailLogEntriesRequest], - Awaitable[logging.TailLogEntriesResponse]]: + def tail_log_entries( + self, + ) -> Callable[ + [logging.TailLogEntriesRequest], Awaitable[logging.TailLogEntriesResponse] + ]: r"""Return a callable for the tail log entries method over gRPC. Streaming read of log entries as they are ingested. @@ -488,16 +512,16 @@ def tail_log_entries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'tail_log_entries' not in self._stubs: - self._stubs['tail_log_entries'] = self._logged_channel.stream_stream( - '/google.logging.v2.LoggingServiceV2/TailLogEntries', + if "tail_log_entries" not in self._stubs: + self._stubs["tail_log_entries"] = self._logged_channel.stream_stream( + "/google.logging.v2.LoggingServiceV2/TailLogEntries", request_serializer=logging.TailLogEntriesRequest.serialize, response_deserializer=logging.TailLogEntriesResponse.deserialize, ) - return self._stubs['tail_log_entries'] + return self._stubs["tail_log_entries"] def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { self.delete_log: self._wrap_method( self.delete_log, @@ -628,8 +652,7 @@ def kind(self) -> str: def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -646,8 +669,7 @@ def cancel_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -663,9 +685,10 @@ def get_operation( @property def list_operations( self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. 
- """ + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -679,6 +702,4 @@ def list_operations( return self._stubs["list_operations"] -__all__ = ( - 'LoggingServiceV2GrpcAsyncIOTransport', -) +__all__ = ("LoggingServiceV2GrpcAsyncIOTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py index 50d894b6003c..5e23af6bce2c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .client import MetricsServiceV2Client from .async_client import MetricsServiceV2AsyncClient +from .client import MetricsServiceV2Client __all__ = ( - 'MetricsServiceV2Client', - 'MetricsServiceV2AsyncClient', + "MetricsServiceV2Client", + "MetricsServiceV2AsyncClient", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 198825c57efb..4756b6abf198 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -14,44 +14,56 @@ # limitations under the License. # import logging as std_logging -from collections import OrderedDict import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.logging_v2 import gapic_version as package_version +from collections import OrderedDict +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) -from google.api_core.client_options import ClientOptions +import google.protobuf from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.logging_v2 import gapic_version as package_version +from google.oauth2 import service_account # type: ignore try: 
OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore -from google.cloud.logging_v2.services.metrics_service_v2 import pagers -from google.cloud.logging_v2.types import logging_metrics -from google.longrunning import operations_pb2 # type: ignore import google.api.distribution_pb2 as distribution_pb2 # type: ignore import google.api.metric_pb2 as metric_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore -from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport +from google.cloud.logging_v2.services.metrics_service_v2 import pagers +from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore + from .client import MetricsServiceV2Client +from .transports.base import DEFAULT_CLIENT_INFO, MetricsServiceV2Transport +from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) + class MetricsServiceV2AsyncClient: """Service for configuring logs-based metrics.""" @@ -66,16 +78,30 @@ class MetricsServiceV2AsyncClient: log_metric_path = staticmethod(MetricsServiceV2Client.log_metric_path) parse_log_metric_path = staticmethod(MetricsServiceV2Client.parse_log_metric_path) - common_billing_account_path = staticmethod(MetricsServiceV2Client.common_billing_account_path) - parse_common_billing_account_path = staticmethod(MetricsServiceV2Client.parse_common_billing_account_path) + common_billing_account_path = staticmethod( + MetricsServiceV2Client.common_billing_account_path + ) + parse_common_billing_account_path = 
staticmethod( + MetricsServiceV2Client.parse_common_billing_account_path + ) common_folder_path = staticmethod(MetricsServiceV2Client.common_folder_path) - parse_common_folder_path = staticmethod(MetricsServiceV2Client.parse_common_folder_path) - common_organization_path = staticmethod(MetricsServiceV2Client.common_organization_path) - parse_common_organization_path = staticmethod(MetricsServiceV2Client.parse_common_organization_path) + parse_common_folder_path = staticmethod( + MetricsServiceV2Client.parse_common_folder_path + ) + common_organization_path = staticmethod( + MetricsServiceV2Client.common_organization_path + ) + parse_common_organization_path = staticmethod( + MetricsServiceV2Client.parse_common_organization_path + ) common_project_path = staticmethod(MetricsServiceV2Client.common_project_path) - parse_common_project_path = staticmethod(MetricsServiceV2Client.parse_common_project_path) + parse_common_project_path = staticmethod( + MetricsServiceV2Client.parse_common_project_path + ) common_location_path = staticmethod(MetricsServiceV2Client.common_location_path) - parse_common_location_path = staticmethod(MetricsServiceV2Client.parse_common_location_path) + parse_common_location_path = staticmethod( + MetricsServiceV2Client.parse_common_location_path + ) @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): @@ -117,7 +143,9 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): """Return the API endpoint and client cert source for mutual TLS. 
The client cert source is determined in the following order: @@ -180,12 +208,18 @@ def universe_domain(self) -> str: get_transport_class = MetricsServiceV2Client.get_transport_class - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MetricsServiceV2Transport, Callable[..., MetricsServiceV2Transport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, MetricsServiceV2Transport, Callable[..., MetricsServiceV2Transport] + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the metrics service v2 async client. Args: @@ -240,31 +274,39 @@ def __init__(self, *, transport=transport, client_options=client_options, client_info=client_info, - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER _LOGGER.debug( "Created client `google.logging_v2.MetricsServiceV2AsyncClient`.", - extra = { + extra={ "serviceName": "google.logging.v2.MetricsServiceV2", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if 
hasattr(self._client._transport, "_credentials") + else { "serviceName": "google.logging.v2.MetricsServiceV2", "credentialsType": None, - } + }, ) - async def list_log_metrics(self, - request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLogMetricsAsyncPager: + async def list_log_metrics( + self, + request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLogMetricsAsyncPager: r"""Lists logs-based metrics. .. code-block:: python @@ -329,10 +371,14 @@ async def sample_list_log_metrics(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -346,14 +392,14 @@ async def sample_list_log_metrics(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.list_log_metrics] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_log_metrics + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -381,14 +427,15 @@ async def sample_list_log_metrics(): # Done; return the response. return response - async def get_log_metric(self, - request: Optional[Union[logging_metrics.GetLogMetricRequest, dict]] = None, - *, - metric_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_metrics.LogMetric: + async def get_log_metric( + self, + request: Optional[Union[logging_metrics.GetLogMetricRequest, dict]] = None, + *, + metric_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_metrics.LogMetric: r"""Gets a logs-based metric. .. code-block:: python @@ -458,10 +505,14 @@ async def sample_get_log_metric(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [metric_name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -475,14 +526,16 @@ async def sample_get_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_log_metric] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_log_metric + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("metric_name", request.metric_name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("metric_name", request.metric_name),) + ), ) # Validate the universe domain. @@ -499,15 +552,16 @@ async def sample_get_log_metric(): # Done; return the response. 
return response - async def create_log_metric(self, - request: Optional[Union[logging_metrics.CreateLogMetricRequest, dict]] = None, - *, - parent: Optional[str] = None, - metric: Optional[logging_metrics.LogMetric] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_metrics.LogMetric: + async def create_log_metric( + self, + request: Optional[Union[logging_metrics.CreateLogMetricRequest, dict]] = None, + *, + parent: Optional[str] = None, + metric: Optional[logging_metrics.LogMetric] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_metrics.LogMetric: r"""Creates a logs-based metric. .. code-block:: python @@ -593,10 +647,14 @@ async def sample_create_log_metric(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent, metric] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -612,14 +670,14 @@ async def sample_create_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.create_log_metric] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_log_metric + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -636,15 +694,16 @@ async def sample_create_log_metric(): # Done; return the response. return response - async def update_log_metric(self, - request: Optional[Union[logging_metrics.UpdateLogMetricRequest, dict]] = None, - *, - metric_name: Optional[str] = None, - metric: Optional[logging_metrics.LogMetric] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_metrics.LogMetric: + async def update_log_metric( + self, + request: Optional[Union[logging_metrics.UpdateLogMetricRequest, dict]] = None, + *, + metric_name: Optional[str] = None, + metric: Optional[logging_metrics.LogMetric] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_metrics.LogMetric: r"""Creates or updates a logs-based metric. .. code-block:: python @@ -729,10 +788,14 @@ async def sample_update_log_metric(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [metric_name, metric] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -748,14 +811,16 @@ async def sample_update_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_log_metric] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_log_metric + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("metric_name", request.metric_name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("metric_name", request.metric_name),) + ), ) # Validate the universe domain. @@ -772,14 +837,15 @@ async def sample_update_log_metric(): # Done; return the response. 
return response - async def delete_log_metric(self, - request: Optional[Union[logging_metrics.DeleteLogMetricRequest, dict]] = None, - *, - metric_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def delete_log_metric( + self, + request: Optional[Union[logging_metrics.DeleteLogMetricRequest, dict]] = None, + *, + metric_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a logs-based metric. .. code-block:: python @@ -830,10 +896,14 @@ async def sample_delete_log_metric(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [metric_name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -847,14 +917,16 @@ async def sample_delete_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.delete_log_metric] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_log_metric + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("metric_name", request.metric_name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("metric_name", request.metric_name),) + ), ) # Validate the universe domain. @@ -910,8 +982,7 @@ async def list_operations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -919,7 +990,11 @@ async def list_operations( # Send the request. response = await rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -966,8 +1041,7 @@ async def get_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -975,7 +1049,11 @@ async def get_operation( # Send the request. response = await rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1025,15 +1103,19 @@ async def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - await rpc(request_pb, retry=retry, timeout=timeout, metadata=metadata,) + await rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) async def __aenter__(self) -> "MetricsServiceV2AsyncClient": return self @@ -1041,12 +1123,13 @@ async def __aenter__(self) -> "MetricsServiceV2AsyncClient": async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "MetricsServiceV2AsyncClient", -) +__all__ = ("MetricsServiceV2AsyncClient",) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index cb17f9cf26e8..b4f53109c79c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -13,27 +13,38 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from collections import OrderedDict -from http import HTTPStatus import json import logging as std_logging import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast import warnings +from collections import OrderedDict +from http import HTTPStatus +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) -from google.cloud.logging_v2 import gapic_version as package_version - +import google.protobuf from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.logging_v2 import gapic_version as package_version +from google.oauth2 import service_account # type: ignore try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -42,19 +53,21 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) -from google.cloud.logging_v2.services.metrics_service_v2 import pagers -from google.cloud.logging_v2.types import logging_metrics -from 
google.longrunning import operations_pb2 # type: ignore import google.api.distribution_pb2 as distribution_pb2 # type: ignore import google.api.metric_pb2 as metric_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore -from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO +from google.cloud.logging_v2.services.metrics_service_v2 import pagers +from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore + +from .transports.base import DEFAULT_CLIENT_INFO, MetricsServiceV2Transport from .transports.grpc import MetricsServiceV2GrpcTransport from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport @@ -66,13 +79,15 @@ class MetricsServiceV2ClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. """ + _transport_registry = OrderedDict() # type: Dict[str, Type[MetricsServiceV2Transport]] _transport_registry["grpc"] = MetricsServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = MetricsServiceV2GrpcAsyncIOTransport - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[MetricsServiceV2Transport]: + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[MetricsServiceV2Transport]: """Returns an appropriate transport class. Args: @@ -148,14 +163,16 @@ def _use_client_cert_effective(): bool: whether client certificate should be used for mTLS Raises: ValueError: (If using a version of google-auth without should_use_client_cert and - GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) 
""" # check if google-auth version supports should_use_client_cert for automatic mTLS enablement if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER return mtls.should_use_client_cert() - else: # pragma: NO COVER + else: # pragma: NO COVER # if unsupported, fallback to reading from env var - use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert_str = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() if use_client_cert_str not in ("true", "false"): raise ValueError( "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" @@ -194,8 +211,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: MetricsServiceV2Client: The constructed client. """ - credentials = service_account.Credentials.from_service_account_file( - filename) + credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials return cls(*args, **kwargs) @@ -212,73 +228,103 @@ def transport(self) -> MetricsServiceV2Transport: return self._transport @staticmethod - def log_metric_path(project: str,metric: str,) -> str: + def log_metric_path( + project: str, + metric: str, + ) -> str: """Returns a fully-qualified log_metric string.""" - return "projects/{project}/metrics/{metric}".format(project=project, metric=metric, ) + return "projects/{project}/metrics/{metric}".format( + project=project, + metric=metric, + ) @staticmethod - def parse_log_metric_path(path: str) -> Dict[str,str]: + def parse_log_metric_path(path: str) -> Dict[str, str]: """Parses a log_metric path into its component segments.""" m = re.match(r"^projects/(?P.+?)/metrics/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" - return 
"billingAccounts/{billing_account}".format(billing_account=billing_account, ) + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: + def parse_common_billing_account_path(path: str) -> Dict[str, str]: """Parse a billing_account path into its component segments.""" m = re.match(r"^billingAccounts/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str, ) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: + def parse_common_folder_path(path: str) -> Dict[str, str]: """Parse a folder path into its component segments.""" m = re.match(r"^folders/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str, ) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: + def parse_common_organization_path(path: str) -> Dict[str, str]: """Parse a organization path into its component segments.""" m = re.match(r"^organizations/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str, ) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) + return "projects/{project}".format( + project=project, + ) @staticmethod - def parse_common_project_path(path: str) -> 
Dict[str,str]: + def parse_common_project_path(path: str) -> Dict[str, str]: """Parse a project path into its component segments.""" m = re.match(r"^projects/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str, ) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: + def parse_common_location_path(path: str) -> Dict[str, str]: """Parse a location path into its component segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -310,14 +356,18 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = MetricsServiceV2Client._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Figure out the client cert source to use. client_cert_source = None @@ -330,7 +380,9 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio # Figure out which api endpoint to use. if client_options.api_endpoint is not None: api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): api_endpoint = cls.DEFAULT_MTLS_ENDPOINT else: api_endpoint = cls.DEFAULT_ENDPOINT @@ -355,7 +407,9 @@ def _read_environment_variables(): use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) return use_client_cert, use_mtls_endpoint, universe_domain_env @staticmethod @@ -378,7 +432,9 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): return client_cert_source @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint) -> str: + def _get_api_endpoint( + 
api_override, client_cert_source, universe_domain, use_mtls_endpoint + ) -> str: """Return the API endpoint used by the client. Args: @@ -394,17 +450,25 @@ def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtl """ if api_override is not None: api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): _default_universe = MetricsServiceV2Client._DEFAULT_UNIVERSE if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) api_endpoint = MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + api_endpoint = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) return api_endpoint @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: """Return the universe domain used by the client. Args: @@ -440,15 +504,18 @@ def _validate_universe_domain(self): return True def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError + self, error: core_exceptions.GoogleAPICallError ) -> None: """Adds credential info string to error details for 401/403/404 errors. Args: error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
""" - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: return cred = self._transport._credentials @@ -481,12 +548,18 @@ def universe_domain(self) -> str: """ return self._universe_domain - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MetricsServiceV2Transport, Callable[..., MetricsServiceV2Transport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, MetricsServiceV2Transport, Callable[..., MetricsServiceV2Transport] + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the metrics service v2 client. 
Args: @@ -541,13 +614,21 @@ def __init__(self, *, self._client_options = client_options_lib.from_dict(self._client_options) if self._client_options is None: self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = MetricsServiceV2Client._read_environment_variables() - self._client_cert_source = MetricsServiceV2Client._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = MetricsServiceV2Client._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ( + MetricsServiceV2Client._read_environment_variables() + ) + self._client_cert_source = MetricsServiceV2Client._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = MetricsServiceV2Client._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) self._api_endpoint: str = "" # updated below, depending on `transport` # Initialize the universe domain validation. @@ -559,7 +640,9 @@ def __init__(self, *, api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. 
# Ordinarily, we provide the transport, but allowing a custom transport @@ -568,30 +651,41 @@ def __init__(self, *, if transport_provided: # transport is a MetricsServiceV2Transport instance. if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) if self._client_options.scopes: raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." + "When providing a transport instance, provide its scopes directly." ) self._transport = cast(MetricsServiceV2Transport, transport) self._api_endpoint = self._transport.host - self._api_endpoint = (self._api_endpoint or - MetricsServiceV2Client._get_api_endpoint( + self._api_endpoint = ( + self._api_endpoint + or MetricsServiceV2Client._get_api_endpoint( self._client_options.api_endpoint, self._client_cert_source, self._universe_domain, - self._use_mtls_endpoint)) + self._use_mtls_endpoint, + ) + ) if not transport_provided: import google.auth._default # type: ignore - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) - transport_init: Union[Type[MetricsServiceV2Transport], Callable[..., MetricsServiceV2Transport]] = ( + transport_init: Union[ + Type[MetricsServiceV2Transport], + Callable[..., MetricsServiceV2Transport], + ] = ( MetricsServiceV2Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., MetricsServiceV2Transport], transport) @@ -610,28 +704,37 @@ def __init__(self, *, ) if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER _LOGGER.debug( "Created client `google.logging_v2.MetricsServiceV2Client`.", - extra = { + extra={ "serviceName": "google.logging.v2.MetricsServiceV2", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { "serviceName": "google.logging.v2.MetricsServiceV2", "credentialsType": None, - } + }, ) - def list_log_metrics(self, - request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLogMetricsPager: + def list_log_metrics( + self, + request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLogMetricsPager: r"""Lists logs-based metrics. .. code-block:: python @@ -696,10 +799,14 @@ def sample_list_log_metrics(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -717,9 +824,7 @@ def sample_list_log_metrics(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -747,14 +852,15 @@ def sample_list_log_metrics(): # Done; return the response. return response - def get_log_metric(self, - request: Optional[Union[logging_metrics.GetLogMetricRequest, dict]] = None, - *, - metric_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_metrics.LogMetric: + def get_log_metric( + self, + request: Optional[Union[logging_metrics.GetLogMetricRequest, dict]] = None, + *, + metric_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_metrics.LogMetric: r"""Gets a logs-based metric. .. 
code-block:: python @@ -824,10 +930,14 @@ def sample_get_log_metric(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [metric_name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -845,9 +955,9 @@ def sample_get_log_metric(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("metric_name", request.metric_name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("metric_name", request.metric_name),) + ), ) # Validate the universe domain. @@ -864,15 +974,16 @@ def sample_get_log_metric(): # Done; return the response. 
return response - def create_log_metric(self, - request: Optional[Union[logging_metrics.CreateLogMetricRequest, dict]] = None, - *, - parent: Optional[str] = None, - metric: Optional[logging_metrics.LogMetric] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_metrics.LogMetric: + def create_log_metric( + self, + request: Optional[Union[logging_metrics.CreateLogMetricRequest, dict]] = None, + *, + parent: Optional[str] = None, + metric: Optional[logging_metrics.LogMetric] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_metrics.LogMetric: r"""Creates a logs-based metric. .. code-block:: python @@ -958,10 +1069,14 @@ def sample_create_log_metric(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent, metric] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -981,9 +1096,7 @@ def sample_create_log_metric(): # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1000,15 +1113,16 @@ def sample_create_log_metric(): # Done; return the response. return response - def update_log_metric(self, - request: Optional[Union[logging_metrics.UpdateLogMetricRequest, dict]] = None, - *, - metric_name: Optional[str] = None, - metric: Optional[logging_metrics.LogMetric] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_metrics.LogMetric: + def update_log_metric( + self, + request: Optional[Union[logging_metrics.UpdateLogMetricRequest, dict]] = None, + *, + metric_name: Optional[str] = None, + metric: Optional[logging_metrics.LogMetric] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_metrics.LogMetric: r"""Creates or updates a logs-based metric. .. code-block:: python @@ -1093,10 +1207,14 @@ def sample_update_log_metric(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [metric_name, metric] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1116,9 +1234,9 @@ def sample_update_log_metric(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("metric_name", request.metric_name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("metric_name", request.metric_name),) + ), ) # Validate the universe domain. @@ -1135,14 +1253,15 @@ def sample_update_log_metric(): # Done; return the response. return response - def delete_log_metric(self, - request: Optional[Union[logging_metrics.DeleteLogMetricRequest, dict]] = None, - *, - metric_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def delete_log_metric( + self, + request: Optional[Union[logging_metrics.DeleteLogMetricRequest, dict]] = None, + *, + metric_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a logs-based metric. .. code-block:: python @@ -1193,10 +1312,14 @@ def sample_delete_log_metric(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [metric_name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1214,9 +1337,9 @@ def sample_delete_log_metric(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("metric_name", request.metric_name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("metric_name", request.metric_name),) + ), ) # Validate the universe domain. @@ -1285,8 +1408,7 @@ def list_operations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -1295,7 +1417,11 @@ def list_operations( try: # Send the request. response = rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1345,8 +1471,7 @@ def get_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. 
@@ -1355,7 +1480,11 @@ def get_operation( try: # Send the request. response = rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1408,27 +1537,26 @@ def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc(request_pb, retry=retry, timeout=timeout, metadata=metadata,) - - - - - + rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "MetricsServiceV2Client", -) +__all__ = ("MetricsServiceV2Client",) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index 3d44cf6e4c67..e296aebedbf8 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -13,13 +13,27 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + from google.api_core import gapic_v1 from google.api_core import retry as retries from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore @@ -44,14 +58,17 @@ class ListLogMetricsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., logging_metrics.ListLogMetricsResponse], - request: logging_metrics.ListLogMetricsRequest, - response: logging_metrics.ListLogMetricsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging_metrics.ListLogMetricsResponse], + request: logging_metrics.ListLogMetricsRequest, + response: logging_metrics.ListLogMetricsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. 
Args: @@ -84,7 +101,12 @@ def pages(self) -> Iterator[logging_metrics.ListLogMetricsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[logging_metrics.LogMetric]: @@ -92,7 +114,7 @@ def __iter__(self) -> Iterator[logging_metrics.LogMetric]: yield from page.metrics def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListLogMetricsAsyncPager: @@ -112,14 +134,17 @@ class ListLogMetricsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[logging_metrics.ListLogMetricsResponse]], - request: logging_metrics.ListLogMetricsRequest, - response: logging_metrics.ListLogMetricsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging_metrics.ListLogMetricsResponse]], + request: logging_metrics.ListLogMetricsRequest, + response: logging_metrics.ListLogMetricsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. 
Args: @@ -152,8 +177,14 @@ async def pages(self) -> AsyncIterator[logging_metrics.ListLogMetricsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[logging_metrics.LogMetric]: async def async_generator(): async for page in self.pages: @@ -163,4 +194,4 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py index a6eb39e80fa0..10e38acd8596 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py @@ -20,14 +20,13 @@ from .grpc import MetricsServiceV2GrpcTransport from .grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport - # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[MetricsServiceV2Transport]] -_transport_registry['grpc'] = MetricsServiceV2GrpcTransport -_transport_registry['grpc_asyncio'] = MetricsServiceV2GrpcAsyncIOTransport +_transport_registry["grpc"] = MetricsServiceV2GrpcTransport +_transport_registry["grpc_asyncio"] = MetricsServiceV2GrpcAsyncIOTransport __all__ = ( - 'MetricsServiceV2Transport', - 'MetricsServiceV2GrpcTransport', - 'MetricsServiceV2GrpcAsyncIOTransport', + "MetricsServiceV2Transport", + "MetricsServiceV2GrpcTransport", + "MetricsServiceV2GrpcAsyncIOTransport", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index f919287841e7..f9198cdeccd9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -16,22 +16,22 @@ import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -from google.cloud.logging_v2 import gapic_version as package_version - -import google.auth # type: ignore import google.api_core +import google.auth # type: ignore +import google.protobuf +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - +from google.cloud.logging_v2 import gapic_version as package_version from google.cloud.logging_v2.types import logging_metrics -from google.longrunning import operations_pb2 
# type: ignore -import google.protobuf.empty_pb2 as empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ @@ -41,27 +41,28 @@ class MetricsServiceV2Transport(abc.ABC): """Abstract transport class for MetricsServiceV2.""" AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", ) - DEFAULT_HOST: str = 'logging.googleapis.com' + DEFAULT_HOST: str = "logging.googleapis.com" def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: 
gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: """Instantiate the transport. Args: @@ -100,31 +101,43 @@ def __init__( # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) if credentials_file is not None: credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - default_scopes=self.AUTH_SCOPES, - ) + credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(scopes=scopes, quota_project_id=quota_project_id, default_scopes=self.AUTH_SCOPES) + credentials, _ = google.auth.default( + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) # Don't apply audience if the credentials file passed from user. if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. 
- if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): credentials = credentials.with_always_use_jwt_access(True) # Save the credentials. self._credentials = credentials # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' + if ":" not in host: + host += ":443" self._host = host self._wrapped_methods: Dict[Callable, Callable] = {} @@ -220,60 +233,63 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - } + } def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
""" raise NotImplementedError() @property - def list_log_metrics(self) -> Callable[ - [logging_metrics.ListLogMetricsRequest], - Union[ - logging_metrics.ListLogMetricsResponse, - Awaitable[logging_metrics.ListLogMetricsResponse] - ]]: + def list_log_metrics( + self, + ) -> Callable[ + [logging_metrics.ListLogMetricsRequest], + Union[ + logging_metrics.ListLogMetricsResponse, + Awaitable[logging_metrics.ListLogMetricsResponse], + ], + ]: raise NotImplementedError() @property - def get_log_metric(self) -> Callable[ - [logging_metrics.GetLogMetricRequest], - Union[ - logging_metrics.LogMetric, - Awaitable[logging_metrics.LogMetric] - ]]: + def get_log_metric( + self, + ) -> Callable[ + [logging_metrics.GetLogMetricRequest], + Union[logging_metrics.LogMetric, Awaitable[logging_metrics.LogMetric]], + ]: raise NotImplementedError() @property - def create_log_metric(self) -> Callable[ - [logging_metrics.CreateLogMetricRequest], - Union[ - logging_metrics.LogMetric, - Awaitable[logging_metrics.LogMetric] - ]]: + def create_log_metric( + self, + ) -> Callable[ + [logging_metrics.CreateLogMetricRequest], + Union[logging_metrics.LogMetric, Awaitable[logging_metrics.LogMetric]], + ]: raise NotImplementedError() @property - def update_log_metric(self) -> Callable[ - [logging_metrics.UpdateLogMetricRequest], - Union[ - logging_metrics.LogMetric, - Awaitable[logging_metrics.LogMetric] - ]]: + def update_log_metric( + self, + ) -> Callable[ + [logging_metrics.UpdateLogMetricRequest], + Union[logging_metrics.LogMetric, Awaitable[logging_metrics.LogMetric]], + ]: raise NotImplementedError() @property - def delete_log_metric(self) -> Callable[ - [logging_metrics.DeleteLogMetricRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def delete_log_metric( + self, + ) -> Callable[ + [logging_metrics.DeleteLogMetricRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: raise NotImplementedError() @property @@ -281,7 +297,10 @@ def list_operations( self, 
) -> Callable[ [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], ]: raise NotImplementedError() @@ -308,6 +327,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ( - 'MetricsServiceV2Transport', -) +__all__ = ("MetricsServiceV2Transport",) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 0594c56f0aee..a1059882a3d6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -19,24 +19,23 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson +import google.auth # type: ignore +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore import google.protobuf.message - import grpc # type: ignore import proto # type: ignore - +from google.api_core import gapic_v1, grpc_helpers +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore from google.cloud.logging_v2.types import logging_metrics -from google.longrunning import operations_pb2 # type: ignore -import google.protobuf.empty_pb2 as empty_pb2 # 
type: ignore -from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson + +from .base import DEFAULT_CLIENT_INFO, MetricsServiceV2Transport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -46,7 +45,9 @@ class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) if logging_enabled: # pragma: NO COVER request_metadata = client_call_details.metadata if isinstance(request, proto.Message): @@ -67,7 +68,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.logging.v2.MetricsServiceV2", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -78,7 +79,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) result = response.result() if isinstance(result, proto.Message): response_payload = type(result).to_json(result) @@ -93,7 +98,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Received response for {client_call_details.method}.", - extra = { + extra={ "serviceName": 
"google.logging.v2.MetricsServiceV2", "rpcName": client_call_details.method, "response": grpc_response, @@ -115,23 +120,26 @@ class MetricsServiceV2GrpcTransport(MetricsServiceV2Transport): It sends protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. """ + _stubs: Dict[str, Callable] - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. 
Args: @@ -258,19 +266,23 @@ def __init__(self, *, ) self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod - def create_channel(cls, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: + def create_channel( + cls, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: """Create and return a gRPC channel object. Args: host (Optional[str]): The host for the channel to use. @@ -306,19 +318,20 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property - def list_log_metrics(self) -> Callable[ - [logging_metrics.ListLogMetricsRequest], - logging_metrics.ListLogMetricsResponse]: + def list_log_metrics( + self, + ) -> Callable[ + [logging_metrics.ListLogMetricsRequest], logging_metrics.ListLogMetricsResponse + ]: r"""Return a callable for the list log metrics method over gRPC. Lists logs-based metrics. @@ -333,18 +346,18 @@ def list_log_metrics(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_log_metrics' not in self._stubs: - self._stubs['list_log_metrics'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/ListLogMetrics', + if "list_log_metrics" not in self._stubs: + self._stubs["list_log_metrics"] = self._logged_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/ListLogMetrics", request_serializer=logging_metrics.ListLogMetricsRequest.serialize, response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize, ) - return self._stubs['list_log_metrics'] + return self._stubs["list_log_metrics"] @property - def get_log_metric(self) -> Callable[ - [logging_metrics.GetLogMetricRequest], - logging_metrics.LogMetric]: + def get_log_metric( + self, + ) -> Callable[[logging_metrics.GetLogMetricRequest], logging_metrics.LogMetric]: r"""Return a callable for the get log metric method over gRPC. Gets a logs-based metric. @@ -359,18 +372,18 @@ def get_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_log_metric' not in self._stubs: - self._stubs['get_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/GetLogMetric', + if "get_log_metric" not in self._stubs: + self._stubs["get_log_metric"] = self._logged_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/GetLogMetric", request_serializer=logging_metrics.GetLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, ) - return self._stubs['get_log_metric'] + return self._stubs["get_log_metric"] @property - def create_log_metric(self) -> Callable[ - [logging_metrics.CreateLogMetricRequest], - logging_metrics.LogMetric]: + def create_log_metric( + self, + ) -> Callable[[logging_metrics.CreateLogMetricRequest], logging_metrics.LogMetric]: r"""Return a callable for the create log metric method over gRPC. Creates a logs-based metric. 
@@ -385,18 +398,18 @@ def create_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_log_metric' not in self._stubs: - self._stubs['create_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/CreateLogMetric', + if "create_log_metric" not in self._stubs: + self._stubs["create_log_metric"] = self._logged_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/CreateLogMetric", request_serializer=logging_metrics.CreateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, ) - return self._stubs['create_log_metric'] + return self._stubs["create_log_metric"] @property - def update_log_metric(self) -> Callable[ - [logging_metrics.UpdateLogMetricRequest], - logging_metrics.LogMetric]: + def update_log_metric( + self, + ) -> Callable[[logging_metrics.UpdateLogMetricRequest], logging_metrics.LogMetric]: r"""Return a callable for the update log metric method over gRPC. Creates or updates a logs-based metric. @@ -411,18 +424,18 @@ def update_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'update_log_metric' not in self._stubs: - self._stubs['update_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/UpdateLogMetric', + if "update_log_metric" not in self._stubs: + self._stubs["update_log_metric"] = self._logged_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/UpdateLogMetric", request_serializer=logging_metrics.UpdateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, ) - return self._stubs['update_log_metric'] + return self._stubs["update_log_metric"] @property - def delete_log_metric(self) -> Callable[ - [logging_metrics.DeleteLogMetricRequest], - empty_pb2.Empty]: + def delete_log_metric( + self, + ) -> Callable[[logging_metrics.DeleteLogMetricRequest], empty_pb2.Empty]: r"""Return a callable for the delete log metric method over gRPC. Deletes a logs-based metric. @@ -437,13 +450,13 @@ def delete_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_log_metric' not in self._stubs: - self._stubs['delete_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/DeleteLogMetric', + if "delete_log_metric" not in self._stubs: + self._stubs["delete_log_metric"] = self._logged_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/DeleteLogMetric", request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_log_metric'] + return self._stubs["delete_log_metric"] def close(self): self._logged_channel.close() @@ -452,8 +465,7 @@ def close(self): def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. 
- """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -470,8 +482,7 @@ def cancel_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -487,9 +498,10 @@ def get_operation( @property def list_operations( self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
# gRPC handles serialization and deserialization, so we just need @@ -507,6 +519,4 @@ def kind(self) -> str: return "grpc" -__all__ = ( - 'MetricsServiceV2GrpcTransport', -) +__all__ = ("MetricsServiceV2GrpcTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index a87425f25f28..7963363d29fa 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -15,32 +15,31 @@ # import inspect import json -import pickle import logging as std_logging +import pickle import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore from grpc.experimental import aio # type: ignore -from 
google.cloud.logging_v2.types import logging_metrics -from google.longrunning import operations_pb2 # type: ignore -import google.protobuf.empty_pb2 as empty_pb2 # type: ignore -from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO +from .base import DEFAULT_CLIENT_INFO, MetricsServiceV2Transport from .grpc import MetricsServiceV2GrpcTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -48,9 +47,13 @@ _LOGGER = std_logging.getLogger(__name__) -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) if logging_enabled: # pragma: NO COVER request_metadata = client_call_details.metadata if isinstance(request, proto.Message): @@ -71,7 +74,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.logging.v2.MetricsServiceV2", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -82,7 +85,11 @@ async def intercept_unary_unary(self, continuation, client_call_details, request if logging_enabled: # pragma: NO COVER response_metadata = await response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) result = await response if isinstance(result, 
proto.Message): response_payload = type(result).to_json(result) @@ -97,7 +104,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Received response to rpc {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.logging.v2.MetricsServiceV2", "rpcName": str(client_call_details.method), "response": grpc_response, @@ -124,13 +131,15 @@ class MetricsServiceV2GrpcAsyncIOTransport(MetricsServiceV2Transport): _stubs: Dict[str, Callable] = {} @classmethod - def create_channel(cls, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: + def create_channel( + cls, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: host (Optional[str]): The host for the channel to use. 
@@ -161,24 +170,26 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. 
Args: @@ -307,7 +318,9 @@ def __init__(self, *, self._interceptor = _LoggingClientAIOInterceptor() self._grpc_channel._unary_unary_interceptors.append(self._interceptor) self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @@ -322,9 +335,12 @@ def grpc_channel(self) -> aio.Channel: return self._grpc_channel @property - def list_log_metrics(self) -> Callable[ - [logging_metrics.ListLogMetricsRequest], - Awaitable[logging_metrics.ListLogMetricsResponse]]: + def list_log_metrics( + self, + ) -> Callable[ + [logging_metrics.ListLogMetricsRequest], + Awaitable[logging_metrics.ListLogMetricsResponse], + ]: r"""Return a callable for the list log metrics method over gRPC. Lists logs-based metrics. @@ -339,18 +355,20 @@ def list_log_metrics(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_log_metrics' not in self._stubs: - self._stubs['list_log_metrics'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/ListLogMetrics', + if "list_log_metrics" not in self._stubs: + self._stubs["list_log_metrics"] = self._logged_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/ListLogMetrics", request_serializer=logging_metrics.ListLogMetricsRequest.serialize, response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize, ) - return self._stubs['list_log_metrics'] + return self._stubs["list_log_metrics"] @property - def get_log_metric(self) -> Callable[ - [logging_metrics.GetLogMetricRequest], - Awaitable[logging_metrics.LogMetric]]: + def get_log_metric( + self, + ) -> Callable[ + [logging_metrics.GetLogMetricRequest], Awaitable[logging_metrics.LogMetric] + ]: r"""Return a callable for the get log metric method over gRPC. Gets a logs-based metric. @@ -365,18 +383,20 @@ def get_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_log_metric' not in self._stubs: - self._stubs['get_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/GetLogMetric', + if "get_log_metric" not in self._stubs: + self._stubs["get_log_metric"] = self._logged_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/GetLogMetric", request_serializer=logging_metrics.GetLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, ) - return self._stubs['get_log_metric'] + return self._stubs["get_log_metric"] @property - def create_log_metric(self) -> Callable[ - [logging_metrics.CreateLogMetricRequest], - Awaitable[logging_metrics.LogMetric]]: + def create_log_metric( + self, + ) -> Callable[ + [logging_metrics.CreateLogMetricRequest], Awaitable[logging_metrics.LogMetric] + ]: r"""Return a callable for the create log metric method over gRPC. Creates a logs-based metric. 
@@ -391,18 +411,20 @@ def create_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_log_metric' not in self._stubs: - self._stubs['create_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/CreateLogMetric', + if "create_log_metric" not in self._stubs: + self._stubs["create_log_metric"] = self._logged_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/CreateLogMetric", request_serializer=logging_metrics.CreateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, ) - return self._stubs['create_log_metric'] + return self._stubs["create_log_metric"] @property - def update_log_metric(self) -> Callable[ - [logging_metrics.UpdateLogMetricRequest], - Awaitable[logging_metrics.LogMetric]]: + def update_log_metric( + self, + ) -> Callable[ + [logging_metrics.UpdateLogMetricRequest], Awaitable[logging_metrics.LogMetric] + ]: r"""Return a callable for the update log metric method over gRPC. Creates or updates a logs-based metric. @@ -417,18 +439,18 @@ def update_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'update_log_metric' not in self._stubs: - self._stubs['update_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/UpdateLogMetric', + if "update_log_metric" not in self._stubs: + self._stubs["update_log_metric"] = self._logged_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/UpdateLogMetric", request_serializer=logging_metrics.UpdateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, ) - return self._stubs['update_log_metric'] + return self._stubs["update_log_metric"] @property - def delete_log_metric(self) -> Callable[ - [logging_metrics.DeleteLogMetricRequest], - Awaitable[empty_pb2.Empty]]: + def delete_log_metric( + self, + ) -> Callable[[logging_metrics.DeleteLogMetricRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete log metric method over gRPC. Deletes a logs-based metric. @@ -443,16 +465,16 @@ def delete_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'delete_log_metric' not in self._stubs: - self._stubs['delete_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/DeleteLogMetric', + if "delete_log_metric" not in self._stubs: + self._stubs["delete_log_metric"] = self._logged_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/DeleteLogMetric", request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_log_metric'] + return self._stubs["delete_log_metric"] def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { self.list_log_metrics: self._wrap_method( self.list_log_metrics, @@ -556,8 +578,7 @@ def kind(self) -> str: def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -574,8 +595,7 @@ def cancel_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -591,9 +611,10 @@ def get_operation( @property def list_operations( self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. 
- """ + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -607,6 +628,4 @@ def list_operations( return self._stubs["list_operations"] -__all__ = ( - 'MetricsServiceV2GrpcAsyncIOTransport', -) +__all__ = ("MetricsServiceV2GrpcAsyncIOTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py index 3023b14aa8ae..8519243a0d2a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py @@ -59,6 +59,8 @@ GetSinkRequest, GetViewRequest, IndexConfig, + IndexType, + LifecycleState, Link, LinkMetadata, ListBucketsRequest, @@ -76,6 +78,7 @@ LogExclusion, LogSink, LogView, + OperationState, Settings, UndeleteBucketRequest, UpdateBucketRequest, @@ -84,9 +87,6 @@ UpdateSettingsRequest, UpdateSinkRequest, UpdateViewRequest, - IndexType, - LifecycleState, - OperationState, ) from .logging_metrics import ( CreateLogMetricRequest, @@ -99,80 +99,80 @@ ) __all__ = ( - 'LogEntry', - 'LogEntryOperation', - 'LogEntrySourceLocation', - 'LogSplit', - 'DeleteLogRequest', - 'ListLogEntriesRequest', - 'ListLogEntriesResponse', - 'ListLogsRequest', - 'ListLogsResponse', - 'ListMonitoredResourceDescriptorsRequest', - 'ListMonitoredResourceDescriptorsResponse', - 'TailLogEntriesRequest', - 'TailLogEntriesResponse', - 'WriteLogEntriesPartialErrors', - 'WriteLogEntriesRequest', - 'WriteLogEntriesResponse', - 'BigQueryDataset', - 'BigQueryOptions', - 'BucketMetadata', - 'CmekSettings', - 
'CopyLogEntriesMetadata', - 'CopyLogEntriesRequest', - 'CopyLogEntriesResponse', - 'CreateBucketRequest', - 'CreateExclusionRequest', - 'CreateLinkRequest', - 'CreateSinkRequest', - 'CreateViewRequest', - 'DeleteBucketRequest', - 'DeleteExclusionRequest', - 'DeleteLinkRequest', - 'DeleteSinkRequest', - 'DeleteViewRequest', - 'GetBucketRequest', - 'GetCmekSettingsRequest', - 'GetExclusionRequest', - 'GetLinkRequest', - 'GetSettingsRequest', - 'GetSinkRequest', - 'GetViewRequest', - 'IndexConfig', - 'Link', - 'LinkMetadata', - 'ListBucketsRequest', - 'ListBucketsResponse', - 'ListExclusionsRequest', - 'ListExclusionsResponse', - 'ListLinksRequest', - 'ListLinksResponse', - 'ListSinksRequest', - 'ListSinksResponse', - 'ListViewsRequest', - 'ListViewsResponse', - 'LocationMetadata', - 'LogBucket', - 'LogExclusion', - 'LogSink', - 'LogView', - 'Settings', - 'UndeleteBucketRequest', - 'UpdateBucketRequest', - 'UpdateCmekSettingsRequest', - 'UpdateExclusionRequest', - 'UpdateSettingsRequest', - 'UpdateSinkRequest', - 'UpdateViewRequest', - 'IndexType', - 'LifecycleState', - 'OperationState', - 'CreateLogMetricRequest', - 'DeleteLogMetricRequest', - 'GetLogMetricRequest', - 'ListLogMetricsRequest', - 'ListLogMetricsResponse', - 'LogMetric', - 'UpdateLogMetricRequest', + "LogEntry", + "LogEntryOperation", + "LogEntrySourceLocation", + "LogSplit", + "DeleteLogRequest", + "ListLogEntriesRequest", + "ListLogEntriesResponse", + "ListLogsRequest", + "ListLogsResponse", + "ListMonitoredResourceDescriptorsRequest", + "ListMonitoredResourceDescriptorsResponse", + "TailLogEntriesRequest", + "TailLogEntriesResponse", + "WriteLogEntriesPartialErrors", + "WriteLogEntriesRequest", + "WriteLogEntriesResponse", + "BigQueryDataset", + "BigQueryOptions", + "BucketMetadata", + "CmekSettings", + "CopyLogEntriesMetadata", + "CopyLogEntriesRequest", + "CopyLogEntriesResponse", + "CreateBucketRequest", + "CreateExclusionRequest", + "CreateLinkRequest", + "CreateSinkRequest", + 
"CreateViewRequest", + "DeleteBucketRequest", + "DeleteExclusionRequest", + "DeleteLinkRequest", + "DeleteSinkRequest", + "DeleteViewRequest", + "GetBucketRequest", + "GetCmekSettingsRequest", + "GetExclusionRequest", + "GetLinkRequest", + "GetSettingsRequest", + "GetSinkRequest", + "GetViewRequest", + "IndexConfig", + "Link", + "LinkMetadata", + "ListBucketsRequest", + "ListBucketsResponse", + "ListExclusionsRequest", + "ListExclusionsResponse", + "ListLinksRequest", + "ListLinksResponse", + "ListSinksRequest", + "ListSinksResponse", + "ListViewsRequest", + "ListViewsResponse", + "LocationMetadata", + "LogBucket", + "LogExclusion", + "LogSink", + "LogView", + "Settings", + "UndeleteBucketRequest", + "UpdateBucketRequest", + "UpdateCmekSettingsRequest", + "UpdateExclusionRequest", + "UpdateSettingsRequest", + "UpdateSinkRequest", + "UpdateViewRequest", + "IndexType", + "LifecycleState", + "OperationState", + "CreateLogMetricRequest", + "DeleteLogMetricRequest", + "GetLogMetricRequest", + "ListLogMetricsRequest", + "ListLogMetricsResponse", + "LogMetric", + "UpdateLogMetricRequest", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py index 106a4c850dad..b381dd1eb9a7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py @@ -17,23 +17,21 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - import google.api.monitored_resource_pb2 as monitored_resource_pb2 # type: ignore import google.logging.type.http_request_pb2 as http_request_pb2 # type: ignore import google.logging.type.log_severity_pb2 as log_severity_pb2 # type: ignore import google.protobuf.any_pb2 as any_pb2 # type: ignore import 
google.protobuf.struct_pb2 as struct_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore - +import proto # type: ignore __protobuf__ = proto.module( - package='google.logging.v2', + package="google.logging.v2", manifest={ - 'LogEntry', - 'LogEntryOperation', - 'LogEntrySourceLocation', - 'LogSplit', + "LogEntry", + "LogEntryOperation", + "LogEntrySourceLocation", + "LogSplit", }, ) @@ -249,18 +247,18 @@ class LogEntry(proto.Message): proto_payload: any_pb2.Any = proto.Field( proto.MESSAGE, number=2, - oneof='payload', + oneof="payload", message=any_pb2.Any, ) text_payload: str = proto.Field( proto.STRING, number=3, - oneof='payload', + oneof="payload", ) json_payload: struct_pb2.Struct = proto.Field( proto.MESSAGE, number=6, - oneof='payload', + oneof="payload", message=struct_pb2.Struct, ) timestamp: timestamp_pb2.Timestamp = proto.Field( @@ -292,10 +290,10 @@ class LogEntry(proto.Message): proto.STRING, number=11, ) - operation: 'LogEntryOperation' = proto.Field( + operation: "LogEntryOperation" = proto.Field( proto.MESSAGE, number=15, - message='LogEntryOperation', + message="LogEntryOperation", ) trace: str = proto.Field( proto.STRING, @@ -309,15 +307,15 @@ class LogEntry(proto.Message): proto.BOOL, number=30, ) - source_location: 'LogEntrySourceLocation' = proto.Field( + source_location: "LogEntrySourceLocation" = proto.Field( proto.MESSAGE, number=23, - message='LogEntrySourceLocation', + message="LogEntrySourceLocation", ) - split: 'LogSplit' = proto.Field( + split: "LogSplit" = proto.Field( proto.MESSAGE, number=35, - message='LogSplit', + message="LogSplit", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py index 58b6168d7c26..9269d54e0d98 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py @@ -17,29 +17,27 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - -from google.cloud.logging_v2.types import log_entry import google.api.monitored_resource_pb2 as monitored_resource_pb2 # type: ignore import google.protobuf.duration_pb2 as duration_pb2 # type: ignore import google.rpc.status_pb2 as status_pb2 # type: ignore - +import proto # type: ignore +from google.cloud.logging_v2.types import log_entry __protobuf__ = proto.module( - package='google.logging.v2', + package="google.logging.v2", manifest={ - 'DeleteLogRequest', - 'WriteLogEntriesRequest', - 'WriteLogEntriesResponse', - 'WriteLogEntriesPartialErrors', - 'ListLogEntriesRequest', - 'ListLogEntriesResponse', - 'ListMonitoredResourceDescriptorsRequest', - 'ListMonitoredResourceDescriptorsResponse', - 'ListLogsRequest', - 'ListLogsResponse', - 'TailLogEntriesRequest', - 'TailLogEntriesResponse', + "DeleteLogRequest", + "WriteLogEntriesRequest", + "WriteLogEntriesResponse", + "WriteLogEntriesPartialErrors", + "ListLogEntriesRequest", + "ListLogEntriesResponse", + "ListMonitoredResourceDescriptorsRequest", + "ListMonitoredResourceDescriptorsResponse", + "ListLogsRequest", + "ListLogsResponse", + "TailLogEntriesRequest", + "TailLogEntriesResponse", }, ) @@ -191,8 +189,7 @@ class WriteLogEntriesRequest(proto.Message): class WriteLogEntriesResponse(proto.Message): - r"""Result returned from WriteLogEntries. 
- """ + r"""Result returned from WriteLogEntries.""" class WriteLogEntriesPartialErrors(proto.Message): @@ -376,7 +373,9 @@ class ListMonitoredResourceDescriptorsResponse(proto.Message): def raw_page(self): return self - resource_descriptors: MutableSequence[monitored_resource_pb2.MonitoredResourceDescriptor] = proto.RepeatedField( + resource_descriptors: MutableSequence[ + monitored_resource_pb2.MonitoredResourceDescriptor + ] = proto.RepeatedField( proto.MESSAGE, number=1, message=monitored_resource_pb2.MonitoredResourceDescriptor, @@ -556,6 +555,7 @@ class SuppressionInfo(proto.Message): A lower bound on the count of entries omitted due to ``reason``. """ + class Reason(proto.Enum): r"""An indicator of why entries were omitted. @@ -571,14 +571,15 @@ class Reason(proto.Enum): Indicates suppression occurred due to the client not consuming responses quickly enough. """ + REASON_UNSPECIFIED = 0 RATE_LIMIT = 1 NOT_CONSUMED = 2 - reason: 'TailLogEntriesResponse.SuppressionInfo.Reason' = proto.Field( + reason: "TailLogEntriesResponse.SuppressionInfo.Reason" = proto.Field( proto.ENUM, number=1, - enum='TailLogEntriesResponse.SuppressionInfo.Reason', + enum="TailLogEntriesResponse.SuppressionInfo.Reason", ) suppressed_count: int = proto.Field( proto.INT32, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py index 50c894e3883d..78792702a5dd 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py @@ -17,68 +17,66 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # 
type: ignore - +import proto # type: ignore __protobuf__ = proto.module( - package='google.logging.v2', + package="google.logging.v2", manifest={ - 'OperationState', - 'LifecycleState', - 'IndexType', - 'IndexConfig', - 'LogBucket', - 'LogView', - 'LogSink', - 'BigQueryDataset', - 'Link', - 'BigQueryOptions', - 'ListBucketsRequest', - 'ListBucketsResponse', - 'CreateBucketRequest', - 'UpdateBucketRequest', - 'GetBucketRequest', - 'DeleteBucketRequest', - 'UndeleteBucketRequest', - 'ListViewsRequest', - 'ListViewsResponse', - 'CreateViewRequest', - 'UpdateViewRequest', - 'GetViewRequest', - 'DeleteViewRequest', - 'ListSinksRequest', - 'ListSinksResponse', - 'GetSinkRequest', - 'CreateSinkRequest', - 'UpdateSinkRequest', - 'DeleteSinkRequest', - 'CreateLinkRequest', - 'DeleteLinkRequest', - 'ListLinksRequest', - 'ListLinksResponse', - 'GetLinkRequest', - 'LogExclusion', - 'ListExclusionsRequest', - 'ListExclusionsResponse', - 'GetExclusionRequest', - 'CreateExclusionRequest', - 'UpdateExclusionRequest', - 'DeleteExclusionRequest', - 'GetCmekSettingsRequest', - 'UpdateCmekSettingsRequest', - 'CmekSettings', - 'GetSettingsRequest', - 'UpdateSettingsRequest', - 'Settings', - 'CopyLogEntriesRequest', - 'CopyLogEntriesMetadata', - 'CopyLogEntriesResponse', - 'BucketMetadata', - 'LinkMetadata', - 'LocationMetadata', + "OperationState", + "LifecycleState", + "IndexType", + "IndexConfig", + "LogBucket", + "LogView", + "LogSink", + "BigQueryDataset", + "Link", + "BigQueryOptions", + "ListBucketsRequest", + "ListBucketsResponse", + "CreateBucketRequest", + "UpdateBucketRequest", + "GetBucketRequest", + "DeleteBucketRequest", + "UndeleteBucketRequest", + "ListViewsRequest", + "ListViewsResponse", + "CreateViewRequest", + "UpdateViewRequest", + "GetViewRequest", + "DeleteViewRequest", + "ListSinksRequest", + "ListSinksResponse", + "GetSinkRequest", + "CreateSinkRequest", + "UpdateSinkRequest", + "DeleteSinkRequest", + "CreateLinkRequest", + "DeleteLinkRequest", + 
"ListLinksRequest", + "ListLinksResponse", + "GetLinkRequest", + "LogExclusion", + "ListExclusionsRequest", + "ListExclusionsResponse", + "GetExclusionRequest", + "CreateExclusionRequest", + "UpdateExclusionRequest", + "DeleteExclusionRequest", + "GetCmekSettingsRequest", + "UpdateCmekSettingsRequest", + "CmekSettings", + "GetSettingsRequest", + "UpdateSettingsRequest", + "Settings", + "CopyLogEntriesRequest", + "CopyLogEntriesMetadata", + "CopyLogEntriesResponse", + "BucketMetadata", + "LinkMetadata", + "LocationMetadata", }, ) @@ -107,6 +105,7 @@ class OperationState(proto.Enum): OPERATION_STATE_CANCELLED (6): The operation was cancelled by the user. """ + OPERATION_STATE_UNSPECIFIED = 0 OPERATION_STATE_SCHEDULED = 1 OPERATION_STATE_WAITING_FOR_PERMISSIONS = 2 @@ -140,6 +139,7 @@ class LifecycleState(proto.Enum): FAILED (5): The resource is in an INTERNAL error state. """ + LIFECYCLE_STATE_UNSPECIFIED = 0 ACTIVE = 1 DELETE_REQUESTED = 2 @@ -160,6 +160,7 @@ class IndexType(proto.Enum): INDEX_TYPE_INTEGER (2): The index is a integer-type index. 
""" + INDEX_TYPE_UNSPECIFIED = 0 INDEX_TYPE_STRING = 1 INDEX_TYPE_INTEGER = 2 @@ -191,10 +192,10 @@ class IndexConfig(proto.Message): proto.STRING, number=1, ) - type_: 'IndexType' = proto.Field( + type_: "IndexType" = proto.Field( proto.ENUM, number=2, - enum='IndexType', + enum="IndexType", ) create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, @@ -300,10 +301,10 @@ class LogBucket(proto.Message): proto.BOOL, number=9, ) - lifecycle_state: 'LifecycleState' = proto.Field( + lifecycle_state: "LifecycleState" = proto.Field( proto.ENUM, number=12, - enum='LifecycleState', + enum="LifecycleState", ) analytics_enabled: bool = proto.Field( proto.BOOL, @@ -313,15 +314,15 @@ class LogBucket(proto.Message): proto.STRING, number=15, ) - index_configs: MutableSequence['IndexConfig'] = proto.RepeatedField( + index_configs: MutableSequence["IndexConfig"] = proto.RepeatedField( proto.MESSAGE, number=17, - message='IndexConfig', + message="IndexConfig", ) - cmek_settings: 'CmekSettings' = proto.Field( + cmek_settings: "CmekSettings" = proto.Field( proto.MESSAGE, number=19, - message='CmekSettings', + message="CmekSettings", ) @@ -500,6 +501,7 @@ class LogSink(proto.Message): sink. This field may not be present for older sinks. """ + class VersionFormat(proto.Enum): r"""Deprecated. This is unused. @@ -512,6 +514,7 @@ class VersionFormat(proto.Enum): V1 (2): ``LogEntry`` version 1 format. 
""" + VERSION_FORMAT_UNSPECIFIED = 0 V2 = 1 V1 = 2 @@ -536,10 +539,10 @@ class VersionFormat(proto.Enum): proto.BOOL, number=19, ) - exclusions: MutableSequence['LogExclusion'] = proto.RepeatedField( + exclusions: MutableSequence["LogExclusion"] = proto.RepeatedField( proto.MESSAGE, number=16, - message='LogExclusion', + message="LogExclusion", ) output_version_format: VersionFormat = proto.Field( proto.ENUM, @@ -554,11 +557,11 @@ class VersionFormat(proto.Enum): proto.BOOL, number=9, ) - bigquery_options: 'BigQueryOptions' = proto.Field( + bigquery_options: "BigQueryOptions" = proto.Field( proto.MESSAGE, number=12, - oneof='options', - message='BigQueryOptions', + oneof="options", + message="BigQueryOptions", ) create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, @@ -644,15 +647,15 @@ class Link(proto.Message): number=3, message=timestamp_pb2.Timestamp, ) - lifecycle_state: 'LifecycleState' = proto.Field( + lifecycle_state: "LifecycleState" = proto.Field( proto.ENUM, number=4, - enum='LifecycleState', + enum="LifecycleState", ) - bigquery_dataset: 'BigQueryDataset' = proto.Field( + bigquery_dataset: "BigQueryDataset" = proto.Field( proto.MESSAGE, number=5, - message='BigQueryDataset', + message="BigQueryDataset", ) @@ -755,10 +758,10 @@ class ListBucketsResponse(proto.Message): def raw_page(self): return self - buckets: MutableSequence['LogBucket'] = proto.RepeatedField( + buckets: MutableSequence["LogBucket"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='LogBucket', + message="LogBucket", ) next_page_token: str = proto.Field( proto.STRING, @@ -800,10 +803,10 @@ class CreateBucketRequest(proto.Message): proto.STRING, number=2, ) - bucket: 'LogBucket' = proto.Field( + bucket: "LogBucket" = proto.Field( proto.MESSAGE, number=3, - message='LogBucket', + message="LogBucket", ) @@ -842,10 +845,10 @@ class UpdateBucketRequest(proto.Message): proto.STRING, number=1, ) - bucket: 'LogBucket' = proto.Field( + bucket: "LogBucket" = proto.Field( 
proto.MESSAGE, number=2, - message='LogBucket', + message="LogBucket", ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -985,10 +988,10 @@ class ListViewsResponse(proto.Message): def raw_page(self): return self - views: MutableSequence['LogView'] = proto.RepeatedField( + views: MutableSequence["LogView"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='LogView', + message="LogView", ) next_page_token: str = proto.Field( proto.STRING, @@ -1027,10 +1030,10 @@ class CreateViewRequest(proto.Message): proto.STRING, number=2, ) - view: 'LogView' = proto.Field( + view: "LogView" = proto.Field( proto.MESSAGE, number=3, - message='LogView', + message="LogView", ) @@ -1066,10 +1069,10 @@ class UpdateViewRequest(proto.Message): proto.STRING, number=1, ) - view: 'LogView' = proto.Field( + view: "LogView" = proto.Field( proto.MESSAGE, number=2, - message='LogView', + message="LogView", ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -1181,10 +1184,10 @@ class ListSinksResponse(proto.Message): def raw_page(self): return self - sinks: MutableSequence['LogSink'] = proto.RepeatedField( + sinks: MutableSequence["LogSink"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='LogSink', + message="LogSink", ) next_page_token: str = proto.Field( proto.STRING, @@ -1259,10 +1262,10 @@ class CreateSinkRequest(proto.Message): proto.STRING, number=1, ) - sink: 'LogSink' = proto.Field( + sink: "LogSink" = proto.Field( proto.MESSAGE, number=2, - message='LogSink', + message="LogSink", ) unique_writer_identity: bool = proto.Field( proto.BOOL, @@ -1331,10 +1334,10 @@ class UpdateSinkRequest(proto.Message): proto.STRING, number=1, ) - sink: 'LogSink' = proto.Field( + sink: "LogSink" = proto.Field( proto.MESSAGE, number=2, - message='LogSink', + message="LogSink", ) unique_writer_identity: bool = proto.Field( proto.BOOL, @@ -1399,10 +1402,10 @@ class CreateLinkRequest(proto.Message): proto.STRING, number=1, ) - link: 'Link' = 
proto.Field( + link: "Link" = proto.Field( proto.MESSAGE, number=2, - message='Link', + message="Link", ) link_id: str = proto.Field( proto.STRING, @@ -1481,10 +1484,10 @@ class ListLinksResponse(proto.Message): def raw_page(self): return self - links: MutableSequence['Link'] = proto.RepeatedField( + links: MutableSequence["Link"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='Link', + message="Link", ) next_page_token: str = proto.Field( proto.STRING, @@ -1643,10 +1646,10 @@ class ListExclusionsResponse(proto.Message): def raw_page(self): return self - exclusions: MutableSequence['LogExclusion'] = proto.RepeatedField( + exclusions: MutableSequence["LogExclusion"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='LogExclusion', + message="LogExclusion", ) next_page_token: str = proto.Field( proto.STRING, @@ -1708,10 +1711,10 @@ class CreateExclusionRequest(proto.Message): proto.STRING, number=1, ) - exclusion: 'LogExclusion' = proto.Field( + exclusion: "LogExclusion" = proto.Field( proto.MESSAGE, number=2, - message='LogExclusion', + message="LogExclusion", ) @@ -1752,10 +1755,10 @@ class UpdateExclusionRequest(proto.Message): proto.STRING, number=1, ) - exclusion: 'LogExclusion' = proto.Field( + exclusion: "LogExclusion" = proto.Field( proto.MESSAGE, number=2, - message='LogExclusion', + message="LogExclusion", ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -1874,10 +1877,10 @@ class UpdateCmekSettingsRequest(proto.Message): proto.STRING, number=1, ) - cmek_settings: 'CmekSettings' = proto.Field( + cmek_settings: "CmekSettings" = proto.Field( proto.MESSAGE, number=2, - message='CmekSettings', + message="CmekSettings", ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -2073,10 +2076,10 @@ class UpdateSettingsRequest(proto.Message): proto.STRING, number=1, ) - settings: 'Settings' = proto.Field( + settings: "Settings" = proto.Field( proto.MESSAGE, number=2, - message='Settings', + 
message="Settings", ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -2249,19 +2252,19 @@ class CopyLogEntriesMetadata(proto.Message): number=2, message=timestamp_pb2.Timestamp, ) - state: 'OperationState' = proto.Field( + state: "OperationState" = proto.Field( proto.ENUM, number=3, - enum='OperationState', + enum="OperationState", ) cancellation_requested: bool = proto.Field( proto.BOOL, number=4, ) - request: 'CopyLogEntriesRequest' = proto.Field( + request: "CopyLogEntriesRequest" = proto.Field( proto.MESSAGE, number=5, - message='CopyLogEntriesRequest', + message="CopyLogEntriesRequest", ) progress: int = proto.Field( proto.INT32, @@ -2324,22 +2327,22 @@ class BucketMetadata(proto.Message): number=2, message=timestamp_pb2.Timestamp, ) - state: 'OperationState' = proto.Field( + state: "OperationState" = proto.Field( proto.ENUM, number=3, - enum='OperationState', + enum="OperationState", ) - create_bucket_request: 'CreateBucketRequest' = proto.Field( + create_bucket_request: "CreateBucketRequest" = proto.Field( proto.MESSAGE, number=4, - oneof='request', - message='CreateBucketRequest', + oneof="request", + message="CreateBucketRequest", ) - update_bucket_request: 'UpdateBucketRequest' = proto.Field( + update_bucket_request: "UpdateBucketRequest" = proto.Field( proto.MESSAGE, number=5, - oneof='request', - message='UpdateBucketRequest', + oneof="request", + message="UpdateBucketRequest", ) @@ -2380,22 +2383,22 @@ class LinkMetadata(proto.Message): number=2, message=timestamp_pb2.Timestamp, ) - state: 'OperationState' = proto.Field( + state: "OperationState" = proto.Field( proto.ENUM, number=3, - enum='OperationState', + enum="OperationState", ) - create_link_request: 'CreateLinkRequest' = proto.Field( + create_link_request: "CreateLinkRequest" = proto.Field( proto.MESSAGE, number=4, - oneof='request', - message='CreateLinkRequest', + oneof="request", + message="CreateLinkRequest", ) - delete_link_request: 'DeleteLinkRequest' = proto.Field( 
+ delete_link_request: "DeleteLinkRequest" = proto.Field( proto.MESSAGE, number=5, - oneof='request', - message='DeleteLinkRequest', + oneof="request", + message="DeleteLinkRequest", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py index 4fb515d770d4..3fe0393c0e83 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py @@ -17,23 +17,21 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - import google.api.distribution_pb2 as distribution_pb2 # type: ignore import google.api.metric_pb2 as metric_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore - +import proto # type: ignore __protobuf__ = proto.module( - package='google.logging.v2', + package="google.logging.v2", manifest={ - 'LogMetric', - 'ListLogMetricsRequest', - 'ListLogMetricsResponse', - 'GetLogMetricRequest', - 'CreateLogMetricRequest', - 'UpdateLogMetricRequest', - 'DeleteLogMetricRequest', + "LogMetric", + "ListLogMetricsRequest", + "ListLogMetricsResponse", + "GetLogMetricRequest", + "CreateLogMetricRequest", + "UpdateLogMetricRequest", + "DeleteLogMetricRequest", }, ) @@ -180,6 +178,7 @@ class LogMetric(proto.Message): updated this metric. The v2 format is used by default and cannot be changed. """ + class ApiVersion(proto.Enum): r"""Logging API version. @@ -189,6 +188,7 @@ class ApiVersion(proto.Enum): V1 (1): Logging API v1. 
""" + V2 = 0 V1 = 1 @@ -302,10 +302,10 @@ class ListLogMetricsResponse(proto.Message): def raw_page(self): return self - metrics: MutableSequence['LogMetric'] = proto.RepeatedField( + metrics: MutableSequence["LogMetric"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='LogMetric', + message="LogMetric", ) next_page_token: str = proto.Field( proto.STRING, @@ -353,10 +353,10 @@ class CreateLogMetricRequest(proto.Message): proto.STRING, number=1, ) - metric: 'LogMetric' = proto.Field( + metric: "LogMetric" = proto.Field( proto.MESSAGE, number=2, - message='LogMetric', + message="LogMetric", ) @@ -383,10 +383,10 @@ class UpdateLogMetricRequest(proto.Message): proto.STRING, number=1, ) - metric: 'LogMetric' = proto.Field( + metric: "LogMetric" = proto.Field( proto.MESSAGE, number=2, - message='LogMetric', + message="LogMetric", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index 491848c947bd..52a241a3d4bb 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -17,9 +17,8 @@ import pathlib import re import shutil - -from typing import Dict, List import warnings +from typing import Dict, List import nox @@ -154,7 +153,8 @@ def lint(session): # 2. Check formatting session.run( - "ruff", "format", + "ruff", + "format", "--check", f"--target-version=py{ALL_PYTHON[0].replace('.', '')}", "--line-length=88", @@ -167,12 +167,15 @@ def lint(session): @nox.session(python=DEFAULT_PYTHON_VERSION) def blacken(session): """(Deprecated) Legacy session. Please use 'nox -s format'.""" - session.log("WARNING: The 'blacken' session is deprecated and will be removed in a future release. Please use 'nox -s format' in the future.") + session.log( + "WARNING: The 'blacken' session is deprecated and will be removed in a future release. 
Please use 'nox -s format' in the future." + ) # Just run the ruff formatter (keeping legacy behavior of only formatting, not sorting imports) session.install(RUFF_VERSION) session.run( - "ruff", "format", + "ruff", + "format", f"--target-version=py{ALL_PYTHON[0].replace('.', '')}", "--line-length=88", *LINT_PATHS, @@ -191,8 +194,10 @@ def format(session): # check --select I: Enables strict import sorting # --fix: Applies the changes automatically session.run( - "ruff", "check", - "--select", "I", + "ruff", + "check", + "--select", + "I", "--fix", f"--target-version=py{ALL_PYTHON[0].replace('.', '')}", "--line-length=88", # Standard Black line length @@ -201,7 +206,8 @@ def format(session): # 3. Run Ruff to format code session.run( - "ruff", "format", + "ruff", + "format", f"--target-version=py{ALL_PYTHON[0].replace('.', '')}", "--line-length=88", # Standard Black line length *LINT_PATHS, @@ -386,8 +392,10 @@ def docs(session): "sphinx-build", "-T", # show full traceback on exception "-N", # no colors - "-b", "html", # builder - "-d", os.path.join("docs", "_build", "doctrees", ""), # cache directory + "-b", + "html", # builder + "-d", + os.path.join("docs", "_build", "doctrees", ""), # cache directory # paths to build: os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py index e395997992a5..69f5dee1bb70 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py @@ -54,4 +54,5 @@ async 
def sample_copy_log_entries(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_CopyLogEntries_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py index cfa20a4443d8..40bb06e2d9ea 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py @@ -54,4 +54,5 @@ def sample_copy_log_entries(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_CopyLogEntries_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py index 5ee585345cca..3b90a46329e6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py @@ -50,4 +50,5 @@ async def sample_create_bucket(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_CreateBucket_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py index d7f66d809ab2..00018f0383bd 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py @@ -54,4 +54,5 @@ async def sample_create_bucket_async(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_CreateBucketAsync_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py index e279c8af429b..87464372671d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py @@ -54,4 +54,5 @@ def sample_create_bucket_async(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_CreateBucketAsync_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py index 9cee676e79ed..4c4d327bec9d 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py @@ -50,4 +50,5 @@ def sample_create_bucket(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_CreateBucket_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py index a40632d36959..0b02b47a8613 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py @@ -54,4 +54,5 @@ async def sample_create_exclusion(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_CreateExclusion_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py index 05428dd18de9..95a87a648e99 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py @@ -54,4 +54,5 @@ def sample_create_exclusion(): # Handle 
the response print(response) + # [END logging_v2_generated_ConfigServiceV2_CreateExclusion_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py index 0bb48a0774c9..8d036b4e8f67 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py @@ -54,4 +54,5 @@ async def sample_create_link(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_CreateLink_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py index 2564dddc6249..2aa01d9121fd 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py @@ -54,4 +54,5 @@ def sample_create_link(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_CreateLink_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py index 
ae12b2a42007..f20329d9ea54 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py @@ -54,4 +54,5 @@ async def sample_create_sink(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_CreateSink_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py index 9fa8b6fd74f4..7b17279bd47c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py @@ -54,4 +54,5 @@ def sample_create_sink(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_CreateSink_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py index f885d552594a..85566d5b03d2 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py @@ -50,4 +50,5 @@ async def sample_create_view(): # Handle the response 
print(response) + # [END logging_v2_generated_ConfigServiceV2_CreateView_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py index e2689d1d0cf1..d1a2ace5a739 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py @@ -50,4 +50,5 @@ def sample_create_view(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_CreateView_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py index 6728984a0225..c7a210badf41 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py @@ -53,4 +53,5 @@ async def sample_delete_link(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_DeleteLink_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py index 
26018c30339b..4ce56dfd2aad 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py @@ -53,4 +53,5 @@ def sample_delete_link(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_DeleteLink_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py index 2dc4d347b70f..a6130497c5cb 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py @@ -49,4 +49,5 @@ async def sample_get_bucket(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_GetBucket_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py index 2c8728a187f9..1e379b3ea946 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py @@ -49,4 +49,5 @@ def sample_get_bucket(): # Handle the response print(response) + # 
[END logging_v2_generated_ConfigServiceV2_GetBucket_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py index 96570c459f11..b17e8c502324 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py @@ -49,4 +49,5 @@ async def sample_get_cmek_settings(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_GetCmekSettings_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py index dba97688293b..d0e027a60a98 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py @@ -49,4 +49,5 @@ def sample_get_cmek_settings(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_GetCmekSettings_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py index 34a231caa741..321b23a18b17 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py @@ -49,4 +49,5 @@ async def sample_get_exclusion(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_GetExclusion_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py index a8820f65ab4a..b3c0f165178e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py @@ -49,4 +49,5 @@ def sample_get_exclusion(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_GetExclusion_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py index ee2aefa35041..3063faa7d1e3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py @@ -49,4 +49,5 @@ async def sample_get_link(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_GetLink_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py index e29e7cd54ccd..177de1a53df8 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py @@ -49,4 +49,5 @@ def sample_get_link(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_GetLink_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py index f7c974545c69..1f50cdd5088c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py @@ -49,4 +49,5 @@ async def sample_get_settings(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_GetSettings_async] diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py index 0511154a762e..b1f4cf7f7332 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py @@ -49,4 +49,5 @@ def sample_get_settings(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_GetSettings_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py index 508004fe95d0..720058e90b4e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py @@ -49,4 +49,5 @@ async def sample_get_sink(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_GetSink_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py index 6b62d1fc795e..df69f020d868 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py @@ -49,4 +49,5 @@ def sample_get_sink(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_GetSink_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py index 5a5d1924651e..3bc36755f91e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py @@ -49,4 +49,5 @@ async def sample_get_view(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_GetView_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py index a3dd884f135a..81f941ef9c3a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py @@ -49,4 +49,5 @@ def sample_get_view(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_GetView_sync] diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py index 0022cc9ad990..87ba21687555 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py @@ -50,4 +50,5 @@ async def sample_list_buckets(): async for response in page_result: print(response) + # [END logging_v2_generated_ConfigServiceV2_ListBuckets_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py index bba54f0a35a3..fdf75bb9d905 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py @@ -50,4 +50,5 @@ def sample_list_buckets(): for response in page_result: print(response) + # [END logging_v2_generated_ConfigServiceV2_ListBuckets_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py index 2ebd52d1e1da..1d79609926fd 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py @@ -50,4 +50,5 @@ async def sample_list_exclusions(): async for response in page_result: print(response) + # [END logging_v2_generated_ConfigServiceV2_ListExclusions_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py index 4610d4d89961..04b3be8bfbe1 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py @@ -50,4 +50,5 @@ def sample_list_exclusions(): for response in page_result: print(response) + # [END logging_v2_generated_ConfigServiceV2_ListExclusions_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py index b58baa85d69f..32ace3c31ef9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py @@ -50,4 +50,5 @@ async def sample_list_links(): async for 
response in page_result: print(response) + # [END logging_v2_generated_ConfigServiceV2_ListLinks_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py index 9b390a50f28b..f46848c5286d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py @@ -50,4 +50,5 @@ def sample_list_links(): for response in page_result: print(response) + # [END logging_v2_generated_ConfigServiceV2_ListLinks_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py index 7309d5947176..8f1d6108a581 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py @@ -50,4 +50,5 @@ async def sample_list_sinks(): async for response in page_result: print(response) + # [END logging_v2_generated_ConfigServiceV2_ListSinks_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py 
index 8ce72154b09f..49ce98888a5b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py @@ -50,4 +50,5 @@ def sample_list_sinks(): for response in page_result: print(response) + # [END logging_v2_generated_ConfigServiceV2_ListSinks_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py index 165ac3c53ceb..08fc8c9f3c27 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py @@ -50,4 +50,5 @@ async def sample_list_views(): async for response in page_result: print(response) + # [END logging_v2_generated_ConfigServiceV2_ListViews_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py index 9f17c32de4a5..768fdeede363 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py @@ -50,4 +50,5 @@ def sample_list_views(): for response in 
page_result: print(response) + # [END logging_v2_generated_ConfigServiceV2_ListViews_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py index 5ad81f1298b9..872b3749dc45 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py @@ -49,4 +49,5 @@ async def sample_update_bucket(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_UpdateBucket_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py index 4f9dde2bc0a2..746ce0c21241 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py @@ -53,4 +53,5 @@ async def sample_update_bucket_async(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py index ec3a04b544ea..d1fcb2f2b285 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py @@ -53,4 +53,5 @@ def sample_update_bucket_async(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py index cb29502c3c40..bddc0acb5138 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py @@ -49,4 +49,5 @@ def sample_update_bucket(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_UpdateBucket_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py index 7d8189d4407b..25eda53f36a1 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py @@ -49,4 +49,5 @@ async def sample_update_cmek_settings(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py index ea785cb38035..83b41667d5f5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py @@ -49,4 +49,5 @@ def sample_update_cmek_settings(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py index f61b9f391ebf..c8948d5dd845 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py @@ -54,4 +54,5 @@ async def sample_update_exclusion(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_UpdateExclusion_async] diff 
--git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py index c721650365bb..96db529a7e28 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py @@ -54,4 +54,5 @@ def sample_update_exclusion(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_UpdateExclusion_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py index 0fc190a8dcd8..b943a3224bee 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py @@ -49,4 +49,5 @@ async def sample_update_settings(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_UpdateSettings_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py index 7992e5b91ad7..f14dc4a30cfc 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py @@ -49,4 +49,5 @@ def sample_update_settings(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_UpdateSettings_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py index e3ca79038521..95e5090076e7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py @@ -54,4 +54,5 @@ async def sample_update_sink(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_UpdateSink_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py index e7f6ad63cccb..990e2fac25a2 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py @@ -54,4 +54,5 @@ def sample_update_sink(): # Handle the response print(response) + # [END 
logging_v2_generated_ConfigServiceV2_UpdateSink_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py index b9eddde3d6da..210b3ac7e110 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py @@ -49,4 +49,5 @@ async def sample_update_view(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_UpdateView_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py index ff51d39661f2..3718d586ac59 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py @@ -49,4 +49,5 @@ def sample_update_view(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_UpdateView_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py index c421061799d6..279dd4e70cf3 100755 
--- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py @@ -40,7 +40,7 @@ async def sample_list_log_entries(): # Initialize request argument(s) request = logging_v2.ListLogEntriesRequest( - resource_names=['resource_names_value1', 'resource_names_value2'], + resource_names=["resource_names_value1", "resource_names_value2"], ) # Make the request @@ -50,4 +50,5 @@ async def sample_list_log_entries(): async for response in page_result: print(response) + # [END logging_v2_generated_LoggingServiceV2_ListLogEntries_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py index 06ca2f113fd9..2afed02b2f85 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py @@ -40,7 +40,7 @@ def sample_list_log_entries(): # Initialize request argument(s) request = logging_v2.ListLogEntriesRequest( - resource_names=['resource_names_value1', 'resource_names_value2'], + resource_names=["resource_names_value1", "resource_names_value2"], ) # Make the request @@ -50,4 +50,5 @@ def sample_list_log_entries(): for response in page_result: print(response) + # [END logging_v2_generated_LoggingServiceV2_ListLogEntries_sync] diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py index d33584db700d..abe4a9edb53b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py @@ -50,4 +50,5 @@ async def sample_list_logs(): async for response in page_result: print(response) + # [END logging_v2_generated_LoggingServiceV2_ListLogs_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py index 11ef98ea7222..829706dd4085 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py @@ -50,4 +50,5 @@ def sample_list_logs(): for response in page_result: print(response) + # [END logging_v2_generated_LoggingServiceV2_ListLogs_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py index 574a533dd739..fe8cfdf55ff7 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py @@ -39,8 +39,7 @@ async def sample_list_monitored_resource_descriptors(): client = logging_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListMonitoredResourceDescriptorsRequest( - ) + request = logging_v2.ListMonitoredResourceDescriptorsRequest() # Make the request page_result = client.list_monitored_resource_descriptors(request=request) @@ -49,4 +48,5 @@ async def sample_list_monitored_resource_descriptors(): async for response in page_result: print(response) + # [END logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py index ca6b21d3b0e0..fc1587be7a21 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py @@ -39,8 +39,7 @@ def sample_list_monitored_resource_descriptors(): client = logging_v2.LoggingServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListMonitoredResourceDescriptorsRequest( - ) + request = logging_v2.ListMonitoredResourceDescriptorsRequest() # Make the request page_result = 
client.list_monitored_resource_descriptors(request=request) @@ -49,4 +48,5 @@ def sample_list_monitored_resource_descriptors(): for response in page_result: print(response) + # [END logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py index 646ec1476b63..ec9ef2f25aaa 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py @@ -40,7 +40,7 @@ async def sample_tail_log_entries(): # Initialize request argument(s) request = logging_v2.TailLogEntriesRequest( - resource_names=['resource_names_value1', 'resource_names_value2'], + resource_names=["resource_names_value1", "resource_names_value2"], ) # This method expects an iterator which contains @@ -60,4 +60,5 @@ def request_generator(): async for response in stream: print(response) + # [END logging_v2_generated_LoggingServiceV2_TailLogEntries_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py index aab2284789e3..440ac83d6ee1 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py @@ -40,7 +40,7 @@ def sample_tail_log_entries(): # Initialize request argument(s) request = logging_v2.TailLogEntriesRequest( - resource_names=['resource_names_value1', 'resource_names_value2'], + resource_names=["resource_names_value1", "resource_names_value2"], ) # This method expects an iterator which contains @@ -60,4 +60,5 @@ def request_generator(): for response in stream: print(response) + # [END logging_v2_generated_LoggingServiceV2_TailLogEntries_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py index 75c30e24dd70..740fb8e5bf31 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py @@ -52,4 +52,5 @@ async def sample_write_log_entries(): # Handle the response print(response) + # [END logging_v2_generated_LoggingServiceV2_WriteLogEntries_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py index 78b2c8242f9d..06198642f49b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py @@ -52,4 +52,5 @@ def sample_write_log_entries(): # Handle the response print(response) + # [END logging_v2_generated_LoggingServiceV2_WriteLogEntries_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py index c753d03b1585..32bc34777183 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py @@ -54,4 +54,5 @@ async def sample_create_log_metric(): # Handle the response print(response) + # [END logging_v2_generated_MetricsServiceV2_CreateLogMetric_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py index 307c0ac42604..80a5ac9ff034 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py @@ -54,4 +54,5 @@ def sample_create_log_metric(): # Handle the response print(response) + # [END logging_v2_generated_MetricsServiceV2_CreateLogMetric_sync] diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py index 997d7dcec72c..1194636bf6a5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py @@ -49,4 +49,5 @@ async def sample_get_log_metric(): # Handle the response print(response) + # [END logging_v2_generated_MetricsServiceV2_GetLogMetric_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py index 71c1f95dd1ea..22bf03e1630f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py @@ -49,4 +49,5 @@ def sample_get_log_metric(): # Handle the response print(response) + # [END logging_v2_generated_MetricsServiceV2_GetLogMetric_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py index 102121053298..3b8e1848dc69 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py @@ -50,4 +50,5 @@ async def sample_list_log_metrics(): async for response in page_result: print(response) + # [END logging_v2_generated_MetricsServiceV2_ListLogMetrics_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py index 9f8d13b6fcd1..1c25ceb4e60b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py @@ -50,4 +50,5 @@ def sample_list_log_metrics(): for response in page_result: print(response) + # [END logging_v2_generated_MetricsServiceV2_ListLogMetrics_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py index 6b2ad146c62a..ec6ce082b438 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py @@ -54,4 
+54,5 @@ async def sample_update_log_metric(): # Handle the response print(response) + # [END logging_v2_generated_MetricsServiceV2_UpdateLogMetric_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py index 59a49b611fa2..bbf5f75ba0a8 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py @@ -54,4 +54,5 @@ def sample_update_log_metric(): # Handle the response print(response) + # [END logging_v2_generated_MetricsServiceV2_UpdateLogMetric_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index d330baf0cfcd..12f029f82f06 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -17,20 +17,20 @@ import os import re -import setuptools # type: ignore +import setuptools # type: ignore package_root = os.path.abspath(os.path.dirname(__file__)) -name = 'google-cloud-logging' +name = "google-cloud-logging" description = "Google Cloud Logging API client library" version = None -with open(os.path.join(package_root, 'google/cloud/logging/gapic_version.py')) as fp: +with open(os.path.join(package_root, "google/cloud/logging/gapic_version.py")) as fp: version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) + assert len(version_candidates) == 1 version = version_candidates[0] if version[0] == "0": 
@@ -49,8 +49,7 @@ "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", "protobuf >= 4.25.8, < 8.0.0", ] -extras = { -} +extras = {} url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-logging" package_root = os.path.abspath(os.path.dirname(__file__)) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/__init__.py index 191773d5572d..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/__init__.py index 191773d5572d..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/__init__.py index 191773d5572d..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/__init__.py index 191773d5572d..cbf94b283c70 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index a82aa311bceb..af57f8a9e480 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -14,6 +14,7 @@ # limitations under the License. # import os + # try/except added for compatibility with python < 3.8 try: from unittest import mock @@ -21,48 +22,52 @@ except ImportError: # pragma: NO COVER import mock -import grpc -from grpc.experimental import aio import json import math +from collections.abc import Mapping, Sequence + +import grpc import pytest -from collections.abc import Sequence, Mapping from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule +from grpc.experimental import aio from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule try: from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER +except ImportError: # pragma: NO COVER HAS_GOOGLE_AUTH_AIO = False -from google.api_core import client_options +import google.api_core.operation_async as operation_async # type: ignore +import google.auth +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore +import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore +import google.protobuf.timestamp_pb2 as 
timestamp_pb2 # type: ignore +from google.api_core import ( + client_options, + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operations_v1 -from google.api_core import path_template from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2AsyncClient -from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client -from google.cloud.logging_v2.services.config_service_v2 import pagers -from google.cloud.logging_v2.services.config_service_v2 import transports +from google.cloud.logging_v2.services.config_service_v2 import ( + ConfigServiceV2AsyncClient, + ConfigServiceV2Client, + pagers, + transports, +) from google.cloud.logging_v2.types import logging_config -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account -import google.api_core.operation_async as operation_async # type: ignore -import google.auth -import google.protobuf.empty_pb2 as empty_pb2 # type: ignore -import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore -import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore - - CRED_INFO_JSON = { "credential_source": "/path/to/file", @@ -77,9 +82,11 @@ async def mock_async_gen(data, chunk_size=1): chunk = data[i : i + chunk_size] yield chunk.encode("utf-8") + def client_cert_source_callback(): return b"cert bytes", b"key bytes" + # TODO: use async auth anon credentials by default once 
the minimum version of google-auth is upgraded. # See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. def async_anonymous_credentials(): @@ -87,17 +94,27 @@ def async_anonymous_credentials(): return ga_credentials_async.AnonymousCredentials() return ga_credentials.AnonymousCredentials() + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + # If default endpoint template is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint template so the client can produce a different # mtls endpoint for endpoint testing purposes. 
def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) def test__get_default_mtls_endpoint(): @@ -109,21 +126,47 @@ def test__get_default_mtls_endpoint(): custom_endpoint = ".custom" assert ConfigServiceV2Client._get_default_mtls_endpoint(None) is None - assert ConfigServiceV2Client._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert ConfigServiceV2Client._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert ConfigServiceV2Client._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert ConfigServiceV2Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert ConfigServiceV2Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi - assert ConfigServiceV2Client._get_default_mtls_endpoint(custom_endpoint) == custom_endpoint + assert ( + ConfigServiceV2Client._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + ConfigServiceV2Client._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ConfigServiceV2Client._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ConfigServiceV2Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ConfigServiceV2Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + assert ( + ConfigServiceV2Client._get_default_mtls_endpoint(custom_endpoint) + == custom_endpoint + ) + def test__read_environment_variables(): assert ConfigServiceV2Client._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert ConfigServiceV2Client._read_environment_variables() 
== (True, "auto", None) + assert ConfigServiceV2Client._read_environment_variables() == ( + True, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert ConfigServiceV2Client._read_environment_variables() == (False, "auto", None) + assert ConfigServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} @@ -137,27 +180,46 @@ def test__read_environment_variables(): ) else: assert ConfigServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ConfigServiceV2Client._read_environment_variables() == ( False, - "auto", + "never", None, ) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert ConfigServiceV2Client._read_environment_variables() == (False, "never", None) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert ConfigServiceV2Client._read_environment_variables() == (False, "always", None) + assert ConfigServiceV2Client._read_environment_variables() == ( + False, + "always", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert ConfigServiceV2Client._read_environment_variables() == (False, "auto", None) + assert ConfigServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: ConfigServiceV2Client._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) with mock.patch.dict(os.environ, 
{"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert ConfigServiceV2Client._read_environment_variables() == (False, "auto", "foo.com") + assert ConfigServiceV2Client._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) def test_use_client_cert_effective(): @@ -166,7 +228,9 @@ def test_use_client_cert_effective(): # the google-auth library supports automatic mTLS and determines that a # client certificate should be used. if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=True): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=True + ): assert ConfigServiceV2Client._use_client_cert_effective() is True # Test case 2: Test when `should_use_client_cert` returns False. @@ -174,7 +238,9 @@ def test_use_client_cert_effective(): # the google-auth library supports automatic mTLS and determines that a # client certificate should NOT be used. if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=False): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=False + ): assert ConfigServiceV2Client._use_client_cert_effective() is False # Test case 3: Test when `should_use_client_cert` is unavailable and the @@ -186,7 +252,9 @@ def test_use_client_cert_effective(): # Test case 4: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". 
if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"} + ): assert ConfigServiceV2Client._use_client_cert_effective() is False # Test case 5: Test when `should_use_client_cert` is unavailable and the @@ -198,7 +266,9 @@ def test_use_client_cert_effective(): # Test case 6: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"} + ): assert ConfigServiceV2Client._use_client_cert_effective() is False # Test case 7: Test when `should_use_client_cert` is unavailable and the @@ -210,7 +280,9 @@ def test_use_client_cert_effective(): # Test case 8: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"} + ): assert ConfigServiceV2Client._use_client_cert_effective() is False # Test case 9: Test when `should_use_client_cert` is unavailable and the @@ -225,83 +297,167 @@ def test_use_client_cert_effective(): # The method should raise a ValueError as the environment variable must be either # "true" or "false". 
if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): with pytest.raises(ValueError): ConfigServiceV2Client._use_client_cert_effective() # Test case 11: Test when `should_use_client_cert` is available and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. # The method should return False as the environment variable is set to an invalid value. - if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): assert ConfigServiceV2Client._use_client_cert_effective() is False # Test case 12: Test when `should_use_client_cert` is available and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also, # the GOOGLE_API_CONFIG environment variable is unset. 
- if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): assert ConfigServiceV2Client._use_client_cert_effective() is False + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() assert ConfigServiceV2Client._get_client_cert_source(None, False) is None - assert ConfigServiceV2Client._get_client_cert_source(mock_provided_cert_source, False) is None - assert ConfigServiceV2Client._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + assert ( + ConfigServiceV2Client._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + ConfigServiceV2Client._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert ConfigServiceV2Client._get_client_cert_source(None, True) is mock_default_cert_source - assert ConfigServiceV2Client._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + ConfigServiceV2Client._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + ConfigServiceV2Client._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) -@mock.patch.object(ConfigServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", 
modify_default_endpoint_template(ConfigServiceV2Client)) -@mock.patch.object(ConfigServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ConfigServiceV2AsyncClient)) + +@mock.patch.object( + ConfigServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ConfigServiceV2Client), +) +@mock.patch.object( + ConfigServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ConfigServiceV2AsyncClient), +) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() default_universe = ConfigServiceV2Client._DEFAULT_UNIVERSE - default_endpoint = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) - assert ConfigServiceV2Client._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert ConfigServiceV2Client._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT - assert ConfigServiceV2Client._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert ConfigServiceV2Client._get_api_endpoint(None, None, default_universe, "always") == ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT - assert ConfigServiceV2Client._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT - assert ConfigServiceV2Client._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert ConfigServiceV2Client._get_api_endpoint(None, None, default_universe, "never") == 
default_endpoint + assert ( + ConfigServiceV2Client._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + ConfigServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ConfigServiceV2Client._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + ConfigServiceV2Client._get_api_endpoint(None, None, default_universe, "always") + == ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ConfigServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ConfigServiceV2Client._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + ConfigServiceV2Client._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) with pytest.raises(MutualTLSChannelError) as excinfo: - ConfigServiceV2Client._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + ConfigServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) def test__get_universe_domain(): client_universe_domain = "foo.com" universe_domain_env = "bar.com" - assert ConfigServiceV2Client._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert ConfigServiceV2Client._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert ConfigServiceV2Client._get_universe_domain(None, None) == ConfigServiceV2Client._DEFAULT_UNIVERSE + assert ( + ConfigServiceV2Client._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + ConfigServiceV2Client._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + ConfigServiceV2Client._get_universe_domain(None, None) + == ConfigServiceV2Client._DEFAULT_UNIVERSE + ) with pytest.raises(ValueError) as excinfo: ConfigServiceV2Client._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): cred = mock.Mock(["get_cred_info"]) cred.get_cred_info = mock.Mock(return_value=cred_info_json) @@ -317,7 +473,8 @@ def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_in else: assert error.details == ["foo"] -@pytest.mark.parametrize("error_code", [401,403,404,500]) + +@pytest.mark.parametrize("error_code", [401, 403, 404, 
500]) def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): cred = mock.Mock([]) assert not hasattr(cred, "get_cred_info") @@ -330,59 +487,83 @@ def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): client._add_cred_info_for_auth_errors(error) assert error.details == [] -@pytest.mark.parametrize("client_class,transport_name", [ - (ConfigServiceV2Client, "grpc"), - (ConfigServiceV2AsyncClient, "grpc_asyncio"), -]) -def test_config_service_v2_client_from_service_account_info(client_class, transport_name): + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ConfigServiceV2Client, "grpc"), + (ConfigServiceV2AsyncClient, "grpc_asyncio"), + ], +) +def test_config_service_v2_client_from_service_account_info( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: factory.return_value = creds info = {"valid": True} client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ( - 'logging.googleapis.com:443' - ) + assert client.transport._host == ("logging.googleapis.com:443") -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.ConfigServiceV2GrpcTransport, "grpc"), - (transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_config_service_v2_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ConfigServiceV2GrpcTransport, "grpc"), + (transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), + ], +) +def 
test_config_service_v2_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=False) use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class,transport_name", [ - (ConfigServiceV2Client, "grpc"), - (ConfigServiceV2AsyncClient, "grpc_asyncio"), -]) -def test_config_service_v2_client_from_service_account_file(client_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ConfigServiceV2Client, "grpc"), + (ConfigServiceV2AsyncClient, "grpc_asyncio"), + ], +) +def test_config_service_v2_client_from_service_account_file( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_json( + "dummy/file/path.json", 
transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ( - 'logging.googleapis.com:443' - ) + assert client.transport._host == ("logging.googleapis.com:443") def test_config_service_v2_client_get_transport_class(): @@ -396,29 +577,44 @@ def test_config_service_v2_client_get_transport_class(): assert transport == transports.ConfigServiceV2GrpcTransport -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), - (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(ConfigServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ConfigServiceV2Client)) -@mock.patch.object(ConfigServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ConfigServiceV2AsyncClient)) -def test_config_service_v2_client_client_options(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), + ( + ConfigServiceV2AsyncClient, + transports.ConfigServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + ConfigServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ConfigServiceV2Client), +) +@mock.patch.object( + ConfigServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ConfigServiceV2AsyncClient), +) +def test_config_service_v2_client_client_options( + client_class, transport_class, transport_name +): # Check that if channel is provided we won't create a new one. 
- with mock.patch.object(ConfigServiceV2Client, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) + with mock.patch.object(ConfigServiceV2Client, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. - with mock.patch.object(ConfigServiceV2Client, 'get_transport_class') as gtc: + with mock.patch.object(ConfigServiceV2Client, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() # Check the case api_endpoint is provided. options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( @@ -436,13 +632,15 @@ def test_config_service_v2_client_client_options(client_class, transport_class, # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -454,7 +652,7 @@ def test_config_service_v2_client_client_options(client_class, transport_class, # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( @@ -474,17 +672,22 @@ def test_config_service_v2_client_client_options(client_class, transport_class, with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, 
transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -493,46 +696,90 @@ def test_config_service_v2_client_client_options(client_class, transport_class, api_audience=None, ) # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, - api_audience="https://language.googleapis.com" + api_audience="https://language.googleapis.com", ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc", "true"), - (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "true"), - (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc", "false"), - (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(ConfigServiceV2Client, 
"_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ConfigServiceV2Client)) -@mock.patch.object(ConfigServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ConfigServiceV2AsyncClient)) + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + ConfigServiceV2Client, + transports.ConfigServiceV2GrpcTransport, + "grpc", + "true", + ), + ( + ConfigServiceV2AsyncClient, + transports.ConfigServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + ConfigServiceV2Client, + transports.ConfigServiceV2GrpcTransport, + "grpc", + "false", + ), + ( + ConfigServiceV2AsyncClient, + transports.ConfigServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + ConfigServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ConfigServiceV2Client), +) +@mock.patch.object( + ConfigServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ConfigServiceV2AsyncClient), +) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_config_service_v2_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): +def test_config_service_v2_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. # Check the case client_cert_source is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -551,12 +798,22 @@ def test_config_service_v2_client_mtls_env_auto(client_class, transport_class, t # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -577,15 +834,22 @@ def test_config_service_v2_client_mtls_env_auto(client_class, transport_class, t ) # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -595,19 +859,31 @@ def test_config_service_v2_client_mtls_env_auto(client_class, transport_class, t ) -@pytest.mark.parametrize("client_class", [ - ConfigServiceV2Client, ConfigServiceV2AsyncClient -]) -@mock.patch.object(ConfigServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2Client)) -@mock.patch.object(ConfigServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2AsyncClient)) +@pytest.mark.parametrize( + "client_class", [ConfigServiceV2Client, ConfigServiceV2AsyncClient] +) +@mock.patch.object( + ConfigServiceV2Client, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConfigServiceV2Client), +) +@mock.patch.object( + ConfigServiceV2AsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConfigServiceV2AsyncClient), +) def test_config_service_v2_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source == mock_client_cert_source @@ -615,18 +891,25 @@ def test_config_service_v2_client_get_mtls_endpoint_and_cert_source(client_class with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source is None # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): if hasattr(google.auth.transport.mtls, "should_use_client_cert"): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, ) api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( options @@ -663,23 +946,23 @@ def test_config_service_v2_client_get_mtls_endpoint_and_cert_source(client_class env = os.environ.copy() env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) with mock.patch.dict(os.environ, env, clear=True): - config_filename = "mock_certificate_config.json" - config_file_content = json.dumps(config_data) - m = mock.mock_open(read_data=config_file_content) - with mock.patch("builtins.open", m): - with mock.patch.dict( - os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} - ): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, - api_endpoint=mock_api_endpoint, - ) - api_endpoint, cert_source = ( - client_class.get_mtls_endpoint_and_cert_source(options) - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is expected_cert_source + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert 
api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). test_cases = [ @@ -710,23 +993,23 @@ def test_config_service_v2_client_get_mtls_endpoint_and_cert_source(client_class env = os.environ.copy() env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") with mock.patch.dict(os.environ, env, clear=True): - config_filename = "mock_certificate_config.json" - config_file_content = json.dumps(config_data) - m = mock.mock_open(read_data=config_file_content) - with mock.patch("builtins.open", m): - with mock.patch.dict( - os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} - ): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, - api_endpoint=mock_api_endpoint, - ) - api_endpoint, cert_source = ( - client_class.get_mtls_endpoint_and_cert_source(options) - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is expected_cert_source + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): @@ -742,16 +1025,27 @@ def test_config_service_v2_client_get_mtls_endpoint_and_cert_source(client_class # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_ENDPOINT assert cert_source is None # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source() + ) assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source @@ -761,27 +1055,50 @@ def test_config_service_v2_client_get_mtls_endpoint_and_cert_source(client_class with pytest.raises(MutualTLSChannelError) as excinfo: client_class.get_mtls_endpoint_and_cert_source() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + -@pytest.mark.parametrize("client_class", [ - ConfigServiceV2Client, ConfigServiceV2AsyncClient -]) -@mock.patch.object(ConfigServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", 
modify_default_endpoint_template(ConfigServiceV2Client)) -@mock.patch.object(ConfigServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ConfigServiceV2AsyncClient)) +@pytest.mark.parametrize( + "client_class", [ConfigServiceV2Client, ConfigServiceV2AsyncClient] +) +@mock.patch.object( + ConfigServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ConfigServiceV2Client), +) +@mock.patch.object( + ConfigServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ConfigServiceV2AsyncClient), +) def test_config_service_v2_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" default_universe = ConfigServiceV2Client._DEFAULT_UNIVERSE - default_endpoint = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", @@ -804,11 +1121,19 @@ def test_config_service_v2_client_client_api_endpoint(client_class): universe_exists = hasattr(options, "universe_domain") if universe_exists: options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the 
_DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. @@ -816,26 +1141,39 @@ def test_config_service_v2_client_client_api_endpoint(client_class): if hasattr(options, "universe_domain"): delattr(options, "universe_domain") with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) assert client.api_endpoint == default_endpoint -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), - (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_config_service_v2_client_client_options_scopes(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), + ( + ConfigServiceV2AsyncClient, + transports.ConfigServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_config_service_v2_client_client_options_scopes( + client_class, transport_class, transport_name +): # Check the case scopes are provided. 
options = client_options.ClientOptions( scopes=["1", "2"], ) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -844,23 +1182,39 @@ def test_config_service_v2_client_client_options_scopes(client_class, transport_ api_audience=None, ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc", grpc_helpers), - (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_config_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ConfigServiceV2Client, + transports.ConfigServiceV2GrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ConfigServiceV2AsyncClient, + transports.ConfigServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_config_service_v2_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -869,11 +1223,14 @@ def test_config_service_v2_client_client_options_credentials_file(client_class, api_audience=None, ) + def test_config_service_v2_client_client_options_from_dict(): - with mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2GrpcTransport.__init__') as grpc_transport: + with mock.patch( + "google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2GrpcTransport.__init__" + ) as grpc_transport: grpc_transport.return_value = None client = ConfigServiceV2Client( - client_options={'api_endpoint': 'squid.clam.whelk'} + client_options={"api_endpoint": "squid.clam.whelk"} ) grpc_transport.assert_called_once_with( credentials=None, @@ -888,23 +1245,38 @@ def test_config_service_v2_client_client_options_from_dict(): ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc", grpc_helpers), - (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_config_service_v2_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): +@pytest.mark.parametrize( + 
"client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ConfigServiceV2Client, + transports.ConfigServiceV2GrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ConfigServiceV2AsyncClient, + transports.ConfigServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_config_service_v2_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -914,13 +1286,13 @@ def test_config_service_v2_client_create_channel_credentials_file(client_class, ) # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch.object(grpc_helpers, "create_channel") as create_channel, + ): creds = ga_credentials.AnonymousCredentials() file_creds = ga_credentials.AnonymousCredentials() load_creds.return_value = (file_creds, None) @@ -932,11 +1304,11 @@ def test_config_service_v2_client_create_channel_credentials_file(client_class, credentials_file=None, quota_project_id=None, default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', -), + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), scopes=None, default_host="logging.googleapis.com", ssl_credentials=None, @@ -947,11 +1319,14 @@ def test_config_service_v2_client_create_channel_credentials_file(client_class, ) -@pytest.mark.parametrize("request_type", [ - logging_config.ListBucketsRequest, - dict, -]) -def test_list_buckets(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.ListBucketsRequest, + dict, + ], +) +def test_list_buckets(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -962,12 +1337,10 @@ def test_list_buckets(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the 
gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListBucketsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.list_buckets(request) @@ -979,7 +1352,7 @@ def test_list_buckets(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListBucketsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_list_buckets_non_empty_request_with_auto_populated_field(): @@ -987,30 +1360,31 @@ def test_list_buckets_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.ListBucketsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_buckets(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListBucketsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) + def test_list_buckets_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1029,7 +1403,9 @@ def test_list_buckets_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.list_buckets] = mock_rpc request = {} client.list_buckets(request) @@ -1043,8 +1419,11 @@ def test_list_buckets_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_buckets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_buckets_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1058,12 +1437,17 @@ async def test_list_buckets_async_use_cached_wrapped_rpc(transport: str = "grpc_ wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_buckets in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_buckets + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - 
client._client._transport._wrapped_methods[client._client._transport.list_buckets] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_buckets + ] = mock_rpc request = {} await client.list_buckets(request) @@ -1077,8 +1461,11 @@ async def test_list_buckets_async_use_cached_wrapped_rpc(transport: str = "grpc_ assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_buckets_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListBucketsRequest): +async def test_list_buckets_async( + transport: str = "grpc_asyncio", request_type=logging_config.ListBucketsRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -1089,13 +1476,13 @@ async def test_list_buckets_async(transport: str = 'grpc_asyncio', request_type= request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListBucketsResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListBucketsResponse( + next_page_token="next_page_token_value", + ) + ) response = await client.list_buckets(request) # Establish that the underlying gRPC stub method was called. @@ -1106,13 +1493,14 @@ async def test_list_buckets_async(transport: str = 'grpc_asyncio', request_type= # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListBucketsAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_list_buckets_async_from_dict(): await test_list_buckets_async(request_type=dict) + def test_list_buckets_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1122,12 +1510,10 @@ def test_list_buckets_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.ListBucketsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: call.return_value = logging_config.ListBucketsResponse() client.list_buckets(request) @@ -1139,9 +1525,9 @@ def test_list_buckets_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1154,13 +1540,13 @@ async def test_list_buckets_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.ListBucketsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListBucketsResponse()) + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListBucketsResponse() + ) await client.list_buckets(request) # Establish that the underlying gRPC stub method was called. @@ -1171,9 +1557,9 @@ async def test_list_buckets_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_list_buckets_flattened(): @@ -1182,15 +1568,13 @@ def test_list_buckets_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListBucketsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.list_buckets( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1198,7 +1582,7 @@ def test_list_buckets_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -1212,9 +1596,10 @@ def test_list_buckets_flattened_error(): with pytest.raises(ValueError): client.list_buckets( logging_config.ListBucketsRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test_list_buckets_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -1222,17 +1607,17 @@ async def test_list_buckets_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListBucketsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListBucketsResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListBucketsResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.list_buckets( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1240,9 +1625,10 @@ async def test_list_buckets_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test_list_buckets_flattened_error_async(): client = ConfigServiceV2AsyncClient( @@ -1254,7 +1640,7 @@ async def test_list_buckets_flattened_error_async(): with pytest.raises(ValueError): await client.list_buckets( logging_config.ListBucketsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -1265,9 +1651,7 @@ def test_list_buckets_pager(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListBucketsResponse( @@ -1276,17 +1660,17 @@ def test_list_buckets_pager(transport_name: str = "grpc"): logging_config.LogBucket(), logging_config.LogBucket(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListBucketsResponse( buckets=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListBucketsResponse( buckets=[ logging_config.LogBucket(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListBucketsResponse( buckets=[ @@ -1301,9 +1685,7 @@ def test_list_buckets_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_buckets(request={}, retry=retry, timeout=timeout) @@ -1313,8 +1695,9 @@ def test_list_buckets_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, logging_config.LogBucket) - for i in results) + assert all(isinstance(i, logging_config.LogBucket) for i in results) + + def test_list_buckets_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1322,9 +1705,7 @@ def test_list_buckets_pages(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListBucketsResponse( @@ -1333,17 +1714,17 @@ def test_list_buckets_pages(transport_name: str = "grpc"): logging_config.LogBucket(), logging_config.LogBucket(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListBucketsResponse( buckets=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListBucketsResponse( buckets=[ logging_config.LogBucket(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListBucketsResponse( buckets=[ @@ -1354,9 +1735,10 @@ def test_list_buckets_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.list_buckets(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_list_buckets_async_pager(): client = ConfigServiceV2AsyncClient( @@ -1365,8 +1747,8 @@ async def test_list_buckets_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_buckets), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_buckets), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListBucketsResponse( @@ -1375,17 +1757,17 @@ async def test_list_buckets_async_pager(): logging_config.LogBucket(), logging_config.LogBucket(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListBucketsResponse( buckets=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListBucketsResponse( buckets=[ logging_config.LogBucket(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListBucketsResponse( buckets=[ @@ -1395,15 +1777,16 @@ async def test_list_buckets_async_pager(): ), RuntimeError, ) - async_pager = await client.list_buckets(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.list_buckets( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, logging_config.LogBucket) - for i in responses) + assert all(isinstance(i, logging_config.LogBucket) for i in responses) @pytest.mark.asyncio @@ -1414,8 +1797,8 @@ async def test_list_buckets_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_buckets), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_buckets), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListBucketsResponse( @@ -1424,17 +1807,17 @@ async def test_list_buckets_async_pages(): logging_config.LogBucket(), logging_config.LogBucket(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListBucketsResponse( buckets=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListBucketsResponse( buckets=[ logging_config.LogBucket(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListBucketsResponse( buckets=[ @@ -1447,18 +1830,22 @@ async def test_list_buckets_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.list_buckets(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - logging_config.GetBucketRequest, - dict, -]) -def test_get_bucket(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetBucketRequest, + dict, + ], +) +def test_get_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1469,18 +1856,16 @@ def test_get_bucket(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = logging_config.LogBucket( - name='name_value', - description='description_value', + name="name_value", + description="description_value", retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, analytics_enabled=True, - restricted_fields=['restricted_fields_value'], + restricted_fields=["restricted_fields_value"], ) response = client.get_bucket(request) @@ -1492,13 +1877,13 @@ def test_get_bucket(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.description == "description_value" assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE assert response.analytics_enabled is True - assert response.restricted_fields == ['restricted_fields_value'] + assert response.restricted_fields == ["restricted_fields_value"] def test_get_bucket_non_empty_request_with_auto_populated_field(): @@ -1506,28 +1891,29 @@ def test_get_bucket_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.GetBucketRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_bucket), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_bucket(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetBucketRequest( - name='name_value', + name="name_value", ) + def test_get_bucket_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1546,7 +1932,9 @@ def test_get_bucket_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.get_bucket] = mock_rpc request = {} client.get_bucket(request) @@ -1560,6 +1948,7 @@ def test_get_bucket_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_get_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1575,12 +1964,17 @@ async def test_get_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_as wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_bucket in client._client._transport._wrapped_methods + assert ( + client._client._transport.get_bucket + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_bucket] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.get_bucket + ] = mock_rpc request = {} 
await client.get_bucket(request) @@ -1594,8 +1988,11 @@ async def test_get_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_as assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetBucketRequest): +async def test_get_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetBucketRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -1606,19 +2003,19 @@ async def test_get_bucket_async(transport: str = 'grpc_asyncio', request_type=lo request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( - name='name_value', - description='description_value', - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - analytics_enabled=True, - restricted_fields=['restricted_fields_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=["restricted_fields_value"], + ) + ) response = await client.get_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -1629,19 +2026,20 @@ async def test_get_bucket_async(transport: str = 'grpc_asyncio', request_type=lo # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.description == "description_value" assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE assert response.analytics_enabled is True - assert response.restricted_fields == ['restricted_fields_value'] + assert response.restricted_fields == ["restricted_fields_value"] @pytest.mark.asyncio async def test_get_bucket_async_from_dict(): await test_get_bucket_async(request_type=dict) + def test_get_bucket_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1651,12 +2049,10 @@ def test_get_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: call.return_value = logging_config.LogBucket() client.get_bucket(request) @@ -1668,9 +2064,9 @@ def test_get_bucket_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1683,13 +2079,13 @@ async def test_get_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_bucket), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket()) + with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket() + ) await client.get_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -1700,16 +2096,19 @@ async def test_get_bucket_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - logging_config.CreateBucketRequest, - dict, -]) -def test_create_bucket_async(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.CreateBucketRequest, + dict, + ], +) +def test_create_bucket_async(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1721,10 +2120,10 @@ def test_create_bucket_async(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_bucket_async), - '__call__') as call: + type(client.transport.create_bucket_async), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.create_bucket_async(request) # Establish that the underlying gRPC stub method was called. @@ -1742,30 +2141,33 @@ def test_create_bucket_async_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. 
client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.CreateBucketRequest( - parent='parent_value', - bucket_id='bucket_id_value', + parent="parent_value", + bucket_id="bucket_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_bucket_async), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.create_bucket_async), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_bucket_async(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateBucketRequest( - parent='parent_value', - bucket_id='bucket_id_value', + parent="parent_value", + bucket_id="bucket_id_value", ) + def test_create_bucket_async_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1780,12 +2182,18 @@ def test_create_bucket_async_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_bucket_async in client._transport._wrapped_methods + assert ( + client._transport.create_bucket_async in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.create_bucket_async] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_bucket_async] = ( + mock_rpc + ) request = {} client.create_bucket_async(request) @@ -1803,8 +2211,11 @@ def test_create_bucket_async_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_bucket_async_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_create_bucket_async_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1818,12 +2229,17 @@ async def test_create_bucket_async_async_use_cached_wrapped_rpc(transport: str = wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.create_bucket_async in client._client._transport._wrapped_methods + assert ( + client._client._transport.create_bucket_async + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_bucket_async] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.create_bucket_async + ] = mock_rpc request = {} await client.create_bucket_async(request) @@ -1842,8 +2258,11 @@ async def test_create_bucket_async_async_use_cached_wrapped_rpc(transport: str = assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_bucket_async_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateBucketRequest): +async def 
test_create_bucket_async_async( + transport: str = "grpc_asyncio", request_type=logging_config.CreateBucketRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -1855,11 +2274,11 @@ async def test_create_bucket_async_async(transport: str = 'grpc_asyncio', reques # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_bucket_async), - '__call__') as call: + type(client.transport.create_bucket_async), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.create_bucket_async(request) @@ -1877,6 +2296,7 @@ async def test_create_bucket_async_async(transport: str = 'grpc_asyncio', reques async def test_create_bucket_async_async_from_dict(): await test_create_bucket_async_async(request_type=dict) + def test_create_bucket_async_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1886,13 +2306,13 @@ def test_create_bucket_async_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.CreateBucketRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_bucket_async), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + type(client.transport.create_bucket_async), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.create_bucket_async(request) # Establish that the underlying gRPC stub method was called. @@ -1903,9 +2323,9 @@ def test_create_bucket_async_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1918,13 +2338,15 @@ async def test_create_bucket_async_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.CreateBucketRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_bucket_async), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + type(client.transport.create_bucket_async), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.create_bucket_async(request) # Establish that the underlying gRPC stub method was called. @@ -1935,16 +2357,19 @@ async def test_create_bucket_async_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - logging_config.UpdateBucketRequest, - dict, -]) -def test_update_bucket_async(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateBucketRequest, + dict, + ], +) +def test_update_bucket_async(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1956,10 +2381,10 @@ def test_update_bucket_async(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_bucket_async), - '__call__') as call: + type(client.transport.update_bucket_async), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.update_bucket_async(request) # Establish that the underlying gRPC stub method was called. @@ -1977,28 +2402,31 @@ def test_update_bucket_async_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.UpdateBucketRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_bucket_async), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.update_bucket_async), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.update_bucket_async(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateBucketRequest( - name='name_value', + name="name_value", ) + def test_update_bucket_async_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2013,12 +2441,18 @@ def test_update_bucket_async_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_bucket_async in client._transport._wrapped_methods + assert ( + client._transport.update_bucket_async in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_bucket_async] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.update_bucket_async] = ( + mock_rpc + ) request = {} client.update_bucket_async(request) @@ -2036,8 +2470,11 @@ def test_update_bucket_async_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_bucket_async_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_update_bucket_async_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2051,12 +2488,17 @@ async def test_update_bucket_async_async_use_cached_wrapped_rpc(transport: str = wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.update_bucket_async in client._client._transport._wrapped_methods + assert ( + client._client._transport.update_bucket_async + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_bucket_async] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.update_bucket_async + ] = mock_rpc request = {} await client.update_bucket_async(request) @@ -2075,8 +2517,11 @@ async def test_update_bucket_async_async_use_cached_wrapped_rpc(transport: str = assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_bucket_async_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateBucketRequest): +async def test_update_bucket_async_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), 
transport=transport, @@ -2088,11 +2533,11 @@ async def test_update_bucket_async_async(transport: str = 'grpc_asyncio', reques # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_bucket_async), - '__call__') as call: + type(client.transport.update_bucket_async), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.update_bucket_async(request) @@ -2110,6 +2555,7 @@ async def test_update_bucket_async_async(transport: str = 'grpc_asyncio', reques async def test_update_bucket_async_async_from_dict(): await test_update_bucket_async_async(request_type=dict) + def test_update_bucket_async_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2119,13 +2565,13 @@ def test_update_bucket_async_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_bucket_async), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + type(client.transport.update_bucket_async), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.update_bucket_async(request) # Establish that the underlying gRPC stub method was called. @@ -2136,9 +2582,9 @@ def test_update_bucket_async_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2151,13 +2597,15 @@ async def test_update_bucket_async_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_bucket_async), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + type(client.transport.update_bucket_async), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.update_bucket_async(request) # Establish that the underlying gRPC stub method was called. @@ -2168,16 +2616,19 @@ async def test_update_bucket_async_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - logging_config.CreateBucketRequest, - dict, -]) -def test_create_bucket(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.CreateBucketRequest, + dict, + ], +) +def test_create_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2188,18 +2639,16 @@ def test_create_bucket(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogBucket( - name='name_value', - description='description_value', + name="name_value", + description="description_value", retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, analytics_enabled=True, - restricted_fields=['restricted_fields_value'], + restricted_fields=["restricted_fields_value"], ) response = client.create_bucket(request) @@ -2211,13 +2660,13 @@ def test_create_bucket(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.description == "description_value" assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE assert response.analytics_enabled is True - assert response.restricted_fields == ['restricted_fields_value'] + assert response.restricted_fields == ["restricted_fields_value"] def test_create_bucket_non_empty_request_with_auto_populated_field(): @@ -2225,30 +2674,31 @@ def test_create_bucket_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
request = logging_config.CreateBucketRequest( - parent='parent_value', - bucket_id='bucket_id_value', + parent="parent_value", + bucket_id="bucket_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_bucket), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_bucket(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateBucketRequest( - parent='parent_value', - bucket_id='bucket_id_value', + parent="parent_value", + bucket_id="bucket_id_value", ) + def test_create_bucket_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2267,7 +2717,9 @@ def test_create_bucket_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.create_bucket] = mock_rpc request = {} client.create_bucket(request) @@ -2281,8 +2733,11 @@ def test_create_bucket_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_create_bucket_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2296,12 +2751,17 @@ async def test_create_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.create_bucket in client._client._transport._wrapped_methods + assert ( + client._client._transport.create_bucket + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_bucket] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.create_bucket + ] = mock_rpc request = {} await client.create_bucket(request) @@ -2315,8 +2775,11 @@ async def test_create_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateBucketRequest): +async def test_create_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.CreateBucketRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2327,19 +2790,19 @@ async def 
test_create_bucket_async(transport: str = 'grpc_asyncio', request_type request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( - name='name_value', - description='description_value', - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - analytics_enabled=True, - restricted_fields=['restricted_fields_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=["restricted_fields_value"], + ) + ) response = await client.create_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -2350,19 +2813,20 @@ async def test_create_bucket_async(transport: str = 'grpc_asyncio', request_type # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.description == "description_value" assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE assert response.analytics_enabled is True - assert response.restricted_fields == ['restricted_fields_value'] + assert response.restricted_fields == ["restricted_fields_value"] @pytest.mark.asyncio async def test_create_bucket_async_from_dict(): await test_create_bucket_async(request_type=dict) + def test_create_bucket_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2372,12 +2836,10 @@ def test_create_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.CreateBucketRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: call.return_value = logging_config.LogBucket() client.create_bucket(request) @@ -2389,9 +2851,9 @@ def test_create_bucket_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2404,13 +2866,13 @@ async def test_create_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.CreateBucketRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_bucket), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket()) + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket() + ) await client.create_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -2421,16 +2883,19 @@ async def test_create_bucket_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - logging_config.UpdateBucketRequest, - dict, -]) -def test_update_bucket(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateBucketRequest, + dict, + ], +) +def test_update_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2441,18 +2906,16 @@ def test_update_bucket(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = logging_config.LogBucket( - name='name_value', - description='description_value', + name="name_value", + description="description_value", retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, analytics_enabled=True, - restricted_fields=['restricted_fields_value'], + restricted_fields=["restricted_fields_value"], ) response = client.update_bucket(request) @@ -2464,13 +2927,13 @@ def test_update_bucket(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.description == "description_value" assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE assert response.analytics_enabled is True - assert response.restricted_fields == ['restricted_fields_value'] + assert response.restricted_fields == ["restricted_fields_value"] def test_update_bucket_non_empty_request_with_auto_populated_field(): @@ -2478,28 +2941,29 @@ def test_update_bucket_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.UpdateBucketRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_bucket), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_bucket(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateBucketRequest( - name='name_value', + name="name_value", ) + def test_update_bucket_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2518,7 +2982,9 @@ def test_update_bucket_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.update_bucket] = mock_rpc request = {} client.update_bucket(request) @@ -2532,8 +2998,11 @@ def test_update_bucket_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_update_bucket_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2547,12 +3016,17 @@ async def test_update_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.update_bucket in client._client._transport._wrapped_methods + assert ( + client._client._transport.update_bucket + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc 
= mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_bucket] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.update_bucket + ] = mock_rpc request = {} await client.update_bucket(request) @@ -2566,8 +3040,11 @@ async def test_update_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateBucketRequest): +async def test_update_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2578,19 +3055,19 @@ async def test_update_bucket_async(transport: str = 'grpc_asyncio', request_type request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( - name='name_value', - description='description_value', - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - analytics_enabled=True, - restricted_fields=['restricted_fields_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=["restricted_fields_value"], + ) + ) response = await client.update_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -2601,19 +3078,20 @@ async def test_update_bucket_async(transport: str = 'grpc_asyncio', request_type # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.description == "description_value" assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE assert response.analytics_enabled is True - assert response.restricted_fields == ['restricted_fields_value'] + assert response.restricted_fields == ["restricted_fields_value"] @pytest.mark.asyncio async def test_update_bucket_async_from_dict(): await test_update_bucket_async(request_type=dict) + def test_update_bucket_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2623,12 +3101,10 @@ def test_update_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: call.return_value = logging_config.LogBucket() client.update_bucket(request) @@ -2640,9 +3116,9 @@ def test_update_bucket_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2655,13 +3131,13 @@ async def test_update_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_bucket), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket()) + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket() + ) await client.update_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -2672,16 +3148,19 @@ async def test_update_bucket_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - logging_config.DeleteBucketRequest, - dict, -]) -def test_delete_bucket(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.DeleteBucketRequest, + dict, + ], +) +def test_delete_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2692,9 +3171,7 @@ def test_delete_bucket(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None response = client.delete_bucket(request) @@ -2714,28 +3191,29 @@ def test_delete_bucket_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.DeleteBucketRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_bucket), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_bucket(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteBucketRequest( - name='name_value', + name="name_value", ) + def test_delete_bucket_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2754,7 +3232,9 @@ def test_delete_bucket_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.delete_bucket] = mock_rpc request = {} client.delete_bucket(request) @@ -2768,8 +3248,11 @@ def test_delete_bucket_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_delete_bucket_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2783,12 +3266,17 @@ async def test_delete_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.delete_bucket in client._client._transport._wrapped_methods + assert ( + client._client._transport.delete_bucket + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc 
= mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_bucket] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.delete_bucket + ] = mock_rpc request = {} await client.delete_bucket(request) @@ -2802,8 +3290,11 @@ async def test_delete_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteBucketRequest): +async def test_delete_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.DeleteBucketRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2814,9 +3305,7 @@ async def test_delete_bucket_async(transport: str = 'grpc_asyncio', request_type request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.delete_bucket(request) @@ -2835,6 +3324,7 @@ async def test_delete_bucket_async(transport: str = 'grpc_asyncio', request_type async def test_delete_bucket_async_from_dict(): await test_delete_bucket_async(request_type=dict) + def test_delete_bucket_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2844,12 +3334,10 @@ def test_delete_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.DeleteBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: call.return_value = None client.delete_bucket(request) @@ -2861,9 +3349,9 @@ def test_delete_bucket_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2876,12 +3364,10 @@ async def test_delete_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.DeleteBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_bucket(request) @@ -2893,16 +3379,19 @@ async def test_delete_bucket_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - logging_config.UndeleteBucketRequest, - dict, -]) -def test_undelete_bucket(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UndeleteBucketRequest, + dict, + ], +) +def test_undelete_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2913,9 +3402,7 @@ def test_undelete_bucket(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.undelete_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None response = client.undelete_bucket(request) @@ -2935,28 +3422,29 @@ def test_undelete_bucket_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.UndeleteBucketRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.undelete_bucket), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.undelete_bucket(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UndeleteBucketRequest( - name='name_value', + name="name_value", ) + def test_undelete_bucket_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2975,7 +3463,9 @@ def test_undelete_bucket_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.undelete_bucket] = mock_rpc request = {} client.undelete_bucket(request) @@ -2989,8 +3479,11 @@ def test_undelete_bucket_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_undelete_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_undelete_bucket_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -3004,12 +3497,17 @@ async def test_undelete_bucket_async_use_cached_wrapped_rpc(transport: str = "gr wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.undelete_bucket in client._client._transport._wrapped_methods + assert ( + client._client._transport.undelete_bucket + in client._client._transport._wrapped_methods + ) # Replace cached wrapped 
function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.undelete_bucket] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.undelete_bucket + ] = mock_rpc request = {} await client.undelete_bucket(request) @@ -3023,8 +3521,11 @@ async def test_undelete_bucket_async_use_cached_wrapped_rpc(transport: str = "gr assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_undelete_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.UndeleteBucketRequest): +async def test_undelete_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.UndeleteBucketRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -3035,9 +3536,7 @@ async def test_undelete_bucket_async(transport: str = 'grpc_asyncio', request_ty request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.undelete_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.undelete_bucket(request) @@ -3056,6 +3555,7 @@ async def test_undelete_bucket_async(transport: str = 'grpc_asyncio', request_ty async def test_undelete_bucket_async_from_dict(): await test_undelete_bucket_async(request_type=dict) + def test_undelete_bucket_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -3065,12 +3565,10 @@ def test_undelete_bucket_field_headers(): # a field header. Set these to a non-empty value. 
request = logging_config.UndeleteBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.undelete_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: call.return_value = None client.undelete_bucket(request) @@ -3082,9 +3580,9 @@ def test_undelete_bucket_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -3097,12 +3595,10 @@ async def test_undelete_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UndeleteBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.undelete_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.undelete_bucket(request) @@ -3114,16 +3610,19 @@ async def test_undelete_bucket_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - logging_config.ListViewsRequest, - dict, -]) -def test_list_views(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.ListViewsRequest, + dict, + ], +) +def test_list_views(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3134,12 +3633,10 @@ def test_list_views(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_views), - '__call__') as call: + with mock.patch.object(type(client.transport.list_views), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListViewsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.list_views(request) @@ -3151,7 +3648,7 @@ def test_list_views(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListViewsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_list_views_non_empty_request_with_auto_populated_field(): @@ -3159,30 +3656,31 @@ def test_list_views_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. 
client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.ListViewsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_views), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_views(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListViewsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) + def test_list_views_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3201,7 +3699,9 @@ def test_list_views_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.list_views] = mock_rpc request = {} client.list_views(request) @@ -3215,6 +3715,7 @@ def test_list_views_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3230,12 +3731,17 @@ async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_as wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_views in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_views + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_views] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_views + ] = mock_rpc request = {} await client.list_views(request) @@ -3249,8 +3755,11 @@ async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_as assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_views_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListViewsRequest): +async def test_list_views_async( + transport: str = "grpc_asyncio", request_type=logging_config.ListViewsRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -3261,13 +3770,13 @@ async def test_list_views_async(transport: str = 'grpc_asyncio', request_type=lo request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_views), - '__call__') as call: + with mock.patch.object(type(client.transport.list_views), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListViewsResponse( + next_page_token="next_page_token_value", + ) + ) response = await client.list_views(request) # Establish that the underlying gRPC stub method was called. @@ -3278,13 +3787,14 @@ async def test_list_views_async(transport: str = 'grpc_asyncio', request_type=lo # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListViewsAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_list_views_async_from_dict(): await test_list_views_async(request_type=dict) + def test_list_views_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -3294,12 +3804,10 @@ def test_list_views_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.ListViewsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_views), - '__call__') as call: + with mock.patch.object(type(client.transport.list_views), "__call__") as call: call.return_value = logging_config.ListViewsResponse() client.list_views(request) @@ -3311,9 +3819,9 @@ def test_list_views_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -3326,13 +3834,13 @@ async def test_list_views_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.ListViewsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_views), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse()) + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListViewsResponse() + ) await client.list_views(request) # Establish that the underlying gRPC stub method was called. @@ -3343,9 +3851,9 @@ async def test_list_views_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_list_views_flattened(): @@ -3354,15 +3862,13 @@ def test_list_views_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_views), - '__call__') as call: + with mock.patch.object(type(client.transport.list_views), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListViewsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.list_views( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -3370,7 +3876,7 @@ def test_list_views_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -3384,9 +3890,10 @@ def test_list_views_flattened_error(): with pytest.raises(ValueError): client.list_views( logging_config.ListViewsRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test_list_views_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -3394,17 +3901,17 @@ async def test_list_views_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_views), - '__call__') as call: + with mock.patch.object(type(client.transport.list_views), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListViewsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListViewsResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.list_views( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -3412,9 +3919,10 @@ async def test_list_views_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test_list_views_flattened_error_async(): client = ConfigServiceV2AsyncClient( @@ -3426,7 +3934,7 @@ async def test_list_views_flattened_error_async(): with pytest.raises(ValueError): await client.list_views( logging_config.ListViewsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -3437,9 +3945,7 @@ def test_list_views_pager(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_views), - '__call__') as call: + with mock.patch.object(type(client.transport.list_views), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListViewsResponse( @@ -3448,17 +3954,17 @@ def test_list_views_pager(transport_name: str = "grpc"): logging_config.LogView(), logging_config.LogView(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListViewsResponse( views=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListViewsResponse( views=[ logging_config.LogView(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListViewsResponse( views=[ @@ -3473,9 +3979,7 @@ def test_list_views_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_views(request={}, retry=retry, timeout=timeout) @@ -3485,8 +3989,9 @@ def test_list_views_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, logging_config.LogView) - for i in results) + assert all(isinstance(i, logging_config.LogView) for i in results) + + def test_list_views_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -3494,9 +3999,7 @@ def test_list_views_pages(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_views), - '__call__') as call: + with mock.patch.object(type(client.transport.list_views), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListViewsResponse( @@ -3505,17 +4008,17 @@ def test_list_views_pages(transport_name: str = "grpc"): logging_config.LogView(), logging_config.LogView(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListViewsResponse( views=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListViewsResponse( views=[ logging_config.LogView(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListViewsResponse( views=[ @@ -3526,9 +4029,10 @@ def test_list_views_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.list_views(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_list_views_async_pager(): client = ConfigServiceV2AsyncClient( @@ -3537,8 +4041,8 @@ async def test_list_views_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_views), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_views), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListViewsResponse( @@ -3547,17 +4051,17 @@ async def test_list_views_async_pager(): logging_config.LogView(), logging_config.LogView(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListViewsResponse( views=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListViewsResponse( views=[ logging_config.LogView(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListViewsResponse( views=[ @@ -3567,15 +4071,16 @@ async def test_list_views_async_pager(): ), RuntimeError, ) - async_pager = await client.list_views(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.list_views( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, logging_config.LogView) - for i in responses) + assert all(isinstance(i, logging_config.LogView) for i in responses) @pytest.mark.asyncio @@ -3586,8 +4091,8 @@ async def test_list_views_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_views), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_views), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListViewsResponse( @@ -3596,17 +4101,17 @@ async def test_list_views_async_pages(): logging_config.LogView(), logging_config.LogView(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListViewsResponse( views=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListViewsResponse( views=[ logging_config.LogView(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListViewsResponse( views=[ @@ -3619,18 +4124,22 @@ async def test_list_views_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.list_views(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - logging_config.GetViewRequest, - dict, -]) -def test_get_view(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetViewRequest, + dict, + ], +) +def test_get_view(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3641,14 +4150,12 @@ def test_get_view(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_view), - '__call__') as call: + with mock.patch.object(type(client.transport.get_view), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", ) response = client.get_view(request) @@ -3660,9 +4167,9 @@ def test_get_view(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" def test_get_view_non_empty_request_with_auto_populated_field(): @@ -3670,28 +4177,29 @@ def test_get_view_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.GetViewRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_view), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.get_view(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetViewRequest( - name='name_value', + name="name_value", ) + def test_get_view_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3710,7 +4218,9 @@ def test_get_view_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.get_view] = mock_rpc request = {} client.get_view(request) @@ -3724,6 +4234,7 @@ def test_get_view_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3739,12 +4250,17 @@ async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_view in client._client._transport._wrapped_methods + assert ( + client._client._transport.get_view + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_view] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.get_view + ] = mock_rpc request = {} await client.get_view(request) @@ -3758,8 +4274,11 @@ async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + 
@pytest.mark.asyncio -async def test_get_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetViewRequest): +async def test_get_view_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetViewRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -3770,15 +4289,15 @@ async def test_get_view_async(transport: str = 'grpc_asyncio', request_type=logg request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_view), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', - )) + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", + ) + ) response = await client.get_view(request) # Establish that the underlying gRPC stub method was called. @@ -3789,15 +4308,16 @@ async def test_get_view_async(transport: str = 'grpc_asyncio', request_type=logg # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogView) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" @pytest.mark.asyncio async def test_get_view_async_from_dict(): await test_get_view_async(request_type=dict) + def test_get_view_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -3807,12 +4327,10 @@ def test_get_view_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetViewRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_view), - '__call__') as call: + with mock.patch.object(type(client.transport.get_view), "__call__") as call: call.return_value = logging_config.LogView() client.get_view(request) @@ -3824,9 +4342,9 @@ def test_get_view_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -3839,13 +4357,13 @@ async def test_get_view_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetViewRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_view), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView()) + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView() + ) await client.get_view(request) # Establish that the underlying gRPC stub method was called. @@ -3856,16 +4374,19 @@ async def test_get_view_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - logging_config.CreateViewRequest, - dict, -]) -def test_create_view(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.CreateViewRequest, + dict, + ], +) +def test_create_view(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3876,14 +4397,12 @@ def test_create_view(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_view), - '__call__') as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", ) response = client.create_view(request) @@ -3895,9 +4414,9 @@ def test_create_view(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogView) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" def test_create_view_non_empty_request_with_auto_populated_field(): @@ -3905,30 +4424,31 @@ def test_create_view_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.CreateViewRequest( - parent='parent_value', - view_id='view_id_value', + parent="parent_value", + view_id="view_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_view), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.create_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.create_view(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateViewRequest( - parent='parent_value', - view_id='view_id_value', + parent="parent_value", + view_id="view_id_value", ) + def test_create_view_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3947,7 +4467,9 @@ def test_create_view_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.create_view] = mock_rpc request = {} client.create_view(request) @@ -3961,8 +4483,11 @@ def test_create_view_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_create_view_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -3976,12 +4501,17 @@ async def test_create_view_async_use_cached_wrapped_rpc(transport: str = "grpc_a wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.create_view in client._client._transport._wrapped_methods + assert ( + client._client._transport.create_view + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_view] = 
mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.create_view + ] = mock_rpc request = {} await client.create_view(request) @@ -3995,8 +4525,11 @@ async def test_create_view_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateViewRequest): +async def test_create_view_async( + transport: str = "grpc_asyncio", request_type=logging_config.CreateViewRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -4007,15 +4540,15 @@ async def test_create_view_async(transport: str = 'grpc_asyncio', request_type=l request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_view), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', - )) + with mock.patch.object(type(client.transport.create_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", + ) + ) response = await client.create_view(request) # Establish that the underlying gRPC stub method was called. @@ -4026,15 +4559,16 @@ async def test_create_view_async(transport: str = 'grpc_asyncio', request_type=l # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogView) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" @pytest.mark.asyncio async def test_create_view_async_from_dict(): await test_create_view_async(request_type=dict) + def test_create_view_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -4044,12 +4578,10 @@ def test_create_view_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.CreateViewRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_view), - '__call__') as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: call.return_value = logging_config.LogView() client.create_view(request) @@ -4061,9 +4593,9 @@ def test_create_view_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -4076,13 +4608,13 @@ async def test_create_view_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.CreateViewRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_view), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView()) + with mock.patch.object(type(client.transport.create_view), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView() + ) await client.create_view(request) # Establish that the underlying gRPC stub method was called. @@ -4093,16 +4625,19 @@ async def test_create_view_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - logging_config.UpdateViewRequest, - dict, -]) -def test_update_view(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateViewRequest, + dict, + ], +) +def test_update_view(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4113,14 +4648,12 @@ def test_update_view(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_view), - '__call__') as call: + with mock.patch.object(type(client.transport.update_view), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", ) response = client.update_view(request) @@ -4132,9 +4665,9 @@ def test_update_view(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogView) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" def test_update_view_non_empty_request_with_auto_populated_field(): @@ -4142,28 +4675,29 @@ def test_update_view_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.UpdateViewRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_view), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_view(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateViewRequest( - name='name_value', + name="name_value", ) + def test_update_view_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -4182,7 +4716,9 @@ def test_update_view_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.update_view] = mock_rpc request = {} client.update_view(request) @@ -4196,8 +4732,11 @@ def test_update_view_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_update_view_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -4211,12 +4750,17 @@ async def test_update_view_async_use_cached_wrapped_rpc(transport: str = "grpc_a wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.update_view in client._client._transport._wrapped_methods + assert ( + client._client._transport.update_view + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_view] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.update_view + ] = mock_rpc request = {} await client.update_view(request) @@ -4230,8 +4774,11 @@ async def test_update_view_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateViewRequest): +async def test_update_view_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateViewRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), 
transport=transport, @@ -4242,15 +4789,15 @@ async def test_update_view_async(transport: str = 'grpc_asyncio', request_type=l request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_view), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', - )) + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", + ) + ) response = await client.update_view(request) # Establish that the underlying gRPC stub method was called. @@ -4261,15 +4808,16 @@ async def test_update_view_async(transport: str = 'grpc_asyncio', request_type=l # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" @pytest.mark.asyncio async def test_update_view_async_from_dict(): await test_update_view_async(request_type=dict) + def test_update_view_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -4279,12 +4827,10 @@ def test_update_view_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateViewRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_view), - '__call__') as call: + with mock.patch.object(type(client.transport.update_view), "__call__") as call: call.return_value = logging_config.LogView() client.update_view(request) @@ -4296,9 +4842,9 @@ def test_update_view_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -4311,13 +4857,13 @@ async def test_update_view_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateViewRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_view), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView()) + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView() + ) await client.update_view(request) # Establish that the underlying gRPC stub method was called. @@ -4328,16 +4874,19 @@ async def test_update_view_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - logging_config.DeleteViewRequest, - dict, -]) -def test_delete_view(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.DeleteViewRequest, + dict, + ], +) +def test_delete_view(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4348,9 +4897,7 @@ def test_delete_view(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_view), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None response = client.delete_view(request) @@ -4370,28 +4917,29 @@ def test_delete_view_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.DeleteViewRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_view), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_view(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteViewRequest( - name='name_value', + name="name_value", ) + def test_delete_view_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -4410,7 +4958,9 @@ def test_delete_view_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.delete_view] = mock_rpc request = {} client.delete_view(request) @@ -4424,8 +4974,11 @@ def test_delete_view_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_delete_view_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -4439,12 +4992,17 @@ async def test_delete_view_async_use_cached_wrapped_rpc(transport: str = "grpc_a wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.delete_view in client._client._transport._wrapped_methods + assert ( + client._client._transport.delete_view + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() 
mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_view] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.delete_view + ] = mock_rpc request = {} await client.delete_view(request) @@ -4458,8 +5016,11 @@ async def test_delete_view_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteViewRequest): +async def test_delete_view_async( + transport: str = "grpc_asyncio", request_type=logging_config.DeleteViewRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -4470,9 +5031,7 @@ async def test_delete_view_async(transport: str = 'grpc_asyncio', request_type=l request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_view), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.delete_view(request) @@ -4491,6 +5050,7 @@ async def test_delete_view_async(transport: str = 'grpc_asyncio', request_type=l async def test_delete_view_async_from_dict(): await test_delete_view_async(request_type=dict) + def test_delete_view_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -4500,12 +5060,10 @@ def test_delete_view_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.DeleteViewRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_view), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: call.return_value = None client.delete_view(request) @@ -4517,9 +5075,9 @@ def test_delete_view_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -4532,12 +5090,10 @@ async def test_delete_view_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.DeleteViewRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_view), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_view(request) @@ -4549,16 +5105,19 @@ async def test_delete_view_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - logging_config.ListSinksRequest, - dict, -]) -def test_list_sinks(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.ListSinksRequest, + dict, + ], +) +def test_list_sinks(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4569,12 +5128,10 @@ def test_list_sinks(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_sinks), - '__call__') as call: + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListSinksResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.list_sinks(request) @@ -4586,7 +5143,7 @@ def test_list_sinks(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSinksPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_list_sinks_non_empty_request_with_auto_populated_field(): @@ -4594,30 +5151,31 @@ def test_list_sinks_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.ListSinksRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sinks), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_sinks(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListSinksRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) + def test_list_sinks_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -4636,7 +5194,9 @@ def test_list_sinks_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.list_sinks] = mock_rpc request = {} client.list_sinks(request) @@ -4650,6 +5210,7 @@ def test_list_sinks_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_list_sinks_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4665,12 +5226,17 @@ async def test_list_sinks_async_use_cached_wrapped_rpc(transport: str = "grpc_as wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_sinks in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_sinks + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_sinks] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_sinks + ] = mock_rpc request = {} await client.list_sinks(request) @@ -4684,8 +5250,11 @@ async def 
test_list_sinks_async_use_cached_wrapped_rpc(transport: str = "grpc_as assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_sinks_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListSinksRequest): +async def test_list_sinks_async( + transport: str = "grpc_asyncio", request_type=logging_config.ListSinksRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -4696,13 +5265,13 @@ async def test_list_sinks_async(transport: str = 'grpc_asyncio', request_type=lo request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sinks), - '__call__') as call: + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListSinksResponse( + next_page_token="next_page_token_value", + ) + ) response = await client.list_sinks(request) # Establish that the underlying gRPC stub method was called. @@ -4713,13 +5282,14 @@ async def test_list_sinks_async(transport: str = 'grpc_asyncio', request_type=lo # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSinksAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_list_sinks_async_from_dict(): await test_list_sinks_async(request_type=dict) + def test_list_sinks_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -4729,12 +5299,10 @@ def test_list_sinks_field_headers(): # a field header. Set these to a non-empty value. 
request = logging_config.ListSinksRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sinks), - '__call__') as call: + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: call.return_value = logging_config.ListSinksResponse() client.list_sinks(request) @@ -4746,9 +5314,9 @@ def test_list_sinks_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -4761,13 +5329,13 @@ async def test_list_sinks_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.ListSinksRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sinks), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse()) + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListSinksResponse() + ) await client.list_sinks(request) # Establish that the underlying gRPC stub method was called. @@ -4778,9 +5346,9 @@ async def test_list_sinks_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_list_sinks_flattened(): @@ -4789,15 +5357,13 @@ def test_list_sinks_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_sinks), - '__call__') as call: + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListSinksResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_sinks( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -4805,7 +5371,7 @@ def test_list_sinks_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -4819,9 +5385,10 @@ def test_list_sinks_flattened_error(): with pytest.raises(ValueError): client.list_sinks( logging_config.ListSinksRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test_list_sinks_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -4829,17 +5396,17 @@ async def test_list_sinks_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sinks), - '__call__') as call: + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListSinksResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListSinksResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.list_sinks( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -4847,9 +5414,10 @@ async def test_list_sinks_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test_list_sinks_flattened_error_async(): client = ConfigServiceV2AsyncClient( @@ -4861,7 +5429,7 @@ async def test_list_sinks_flattened_error_async(): with pytest.raises(ValueError): await client.list_sinks( logging_config.ListSinksRequest(), - parent='parent_value', + parent="parent_value", ) @@ -4872,9 +5440,7 @@ def test_list_sinks_pager(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sinks), - '__call__') as call: + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListSinksResponse( @@ -4883,17 +5449,17 @@ def test_list_sinks_pager(transport_name: str = "grpc"): logging_config.LogSink(), logging_config.LogSink(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListSinksResponse( sinks=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListSinksResponse( sinks=[ logging_config.LogSink(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListSinksResponse( sinks=[ @@ -4908,9 +5474,7 @@ def test_list_sinks_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_sinks(request={}, retry=retry, timeout=timeout) @@ -4920,8 +5484,9 @@ def test_list_sinks_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, logging_config.LogSink) - for i in results) + assert all(isinstance(i, logging_config.LogSink) for i in results) + + def test_list_sinks_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -4929,9 +5494,7 @@ def test_list_sinks_pages(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sinks), - '__call__') as call: + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListSinksResponse( @@ -4940,17 +5503,17 @@ def test_list_sinks_pages(transport_name: str = "grpc"): logging_config.LogSink(), logging_config.LogSink(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListSinksResponse( sinks=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListSinksResponse( sinks=[ logging_config.LogSink(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListSinksResponse( sinks=[ @@ -4961,9 +5524,10 @@ def test_list_sinks_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.list_sinks(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_list_sinks_async_pager(): client = ConfigServiceV2AsyncClient( @@ -4972,8 +5536,8 @@ async def test_list_sinks_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_sinks), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_sinks), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListSinksResponse( @@ -4982,17 +5546,17 @@ async def test_list_sinks_async_pager(): logging_config.LogSink(), logging_config.LogSink(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListSinksResponse( sinks=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListSinksResponse( sinks=[ logging_config.LogSink(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListSinksResponse( sinks=[ @@ -5002,15 +5566,16 @@ async def test_list_sinks_async_pager(): ), RuntimeError, ) - async_pager = await client.list_sinks(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.list_sinks( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, logging_config.LogSink) - for i in responses) + assert all(isinstance(i, logging_config.LogSink) for i in responses) @pytest.mark.asyncio @@ -5021,8 +5586,8 @@ async def test_list_sinks_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_sinks), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_sinks), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListSinksResponse( @@ -5031,17 +5596,17 @@ async def test_list_sinks_async_pages(): logging_config.LogSink(), logging_config.LogSink(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListSinksResponse( sinks=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListSinksResponse( sinks=[ logging_config.LogSink(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListSinksResponse( sinks=[ @@ -5054,18 +5619,22 @@ async def test_list_sinks_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.list_sinks(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - logging_config.GetSinkRequest, - dict, -]) -def test_get_sink(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetSinkRequest, + dict, + ], +) +def test_get_sink(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5076,18 +5645,16 @@ def test_get_sink(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", disabled=True, output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', + writer_identity="writer_identity_value", include_children=True, ) response = client.get_sink(request) @@ -5100,13 +5667,13 @@ def test_get_sink(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogSink) - assert response.name == 'name_value' - assert response.destination == 'destination_value' - assert response.filter == 'filter_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.description == "description_value" assert response.disabled is True assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == 'writer_identity_value' + assert response.writer_identity == "writer_identity_value" assert response.include_children is True @@ -5115,28 +5682,29 @@ def test_get_sink_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.GetSinkRequest( - sink_name='sink_name_value', + sink_name="sink_name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_sink), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_sink(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetSinkRequest( - sink_name='sink_name_value', + sink_name="sink_name_value", ) + def test_get_sink_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5155,7 +5723,9 @@ def test_get_sink_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.get_sink] = mock_rpc request = {} client.get_sink(request) @@ -5169,6 +5739,7 @@ def test_get_sink_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_get_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5184,12 +5755,17 @@ async def test_get_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_sink in client._client._transport._wrapped_methods + assert ( + client._client._transport.get_sink + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_sink] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.get_sink + ] = mock_rpc request = {} await client.get_sink(request) @@ -5203,8 +5779,11 @@ async def test_get_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetSinkRequest): +async def test_get_sink_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetSinkRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -5215,20 +5794,20 @@ async def test_get_sink_async(transport: str = 'grpc_asyncio', request_type=logg request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_sink), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', - include_children=True, - )) + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) + ) response = await client.get_sink(request) # Establish that the underlying gRPC stub method was called. @@ -5239,13 +5818,13 @@ async def test_get_sink_async(transport: str = 'grpc_asyncio', request_type=logg # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogSink) - assert response.name == 'name_value' - assert response.destination == 'destination_value' - assert response.filter == 'filter_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.description == "description_value" assert response.disabled is True assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == 'writer_identity_value' + assert response.writer_identity == "writer_identity_value" assert response.include_children is True @@ -5253,6 +5832,7 @@ async def test_get_sink_async(transport: str = 'grpc_asyncio', request_type=logg async def test_get_sink_async_from_dict(): await test_get_sink_async(request_type=dict) + def test_get_sink_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -5262,12 +5842,10 @@ def test_get_sink_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetSinkRequest() - request.sink_name = 'sink_name_value' + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: call.return_value = logging_config.LogSink() client.get_sink(request) @@ -5279,9 +5857,9 @@ def test_get_sink_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'sink_name=sink_name_value', - ) in kw['metadata'] + "x-goog-request-params", + "sink_name=sink_name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -5294,13 +5872,13 @@ async def test_get_sink_field_headers_async(): # a field header. Set these to a non-empty value. 
request = logging_config.GetSinkRequest() - request.sink_name = 'sink_name_value' + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_sink), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink() + ) await client.get_sink(request) # Establish that the underlying gRPC stub method was called. @@ -5311,9 +5889,9 @@ async def test_get_sink_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'sink_name=sink_name_value', - ) in kw['metadata'] + "x-goog-request-params", + "sink_name=sink_name_value", + ) in kw["metadata"] def test_get_sink_flattened(): @@ -5322,15 +5900,13 @@ def test_get_sink_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.get_sink( - sink_name='sink_name_value', + sink_name="sink_name_value", ) # Establish that the underlying call was made with the expected @@ -5338,7 +5914,7 @@ def test_get_sink_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].sink_name - mock_val = 'sink_name_value' + mock_val = "sink_name_value" assert arg == mock_val @@ -5352,9 +5928,10 @@ def test_get_sink_flattened_error(): with pytest.raises(ValueError): client.get_sink( logging_config.GetSinkRequest(), - sink_name='sink_name_value', + sink_name="sink_name_value", ) + @pytest.mark.asyncio async def test_get_sink_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -5362,17 +5939,17 @@ async def test_get_sink_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.get_sink( - sink_name='sink_name_value', + sink_name="sink_name_value", ) # Establish that the underlying call was made with the expected @@ -5380,9 +5957,10 @@ async def test_get_sink_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].sink_name - mock_val = 'sink_name_value' + mock_val = "sink_name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_get_sink_flattened_error_async(): client = ConfigServiceV2AsyncClient( @@ -5394,15 +5972,18 @@ async def test_get_sink_flattened_error_async(): with pytest.raises(ValueError): await client.get_sink( logging_config.GetSinkRequest(), - sink_name='sink_name_value', + sink_name="sink_name_value", ) -@pytest.mark.parametrize("request_type", [ - logging_config.CreateSinkRequest, - dict, -]) -def test_create_sink(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.CreateSinkRequest, + dict, + ], +) +def test_create_sink(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5413,18 +5994,16 @@ def test_create_sink(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", disabled=True, output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', + writer_identity="writer_identity_value", include_children=True, ) response = client.create_sink(request) @@ -5437,13 +6016,13 @@ def test_create_sink(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogSink) - assert response.name == 'name_value' - assert response.destination == 'destination_value' - assert response.filter == 'filter_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.description == "description_value" assert response.disabled is True assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == 'writer_identity_value' + assert response.writer_identity == "writer_identity_value" assert response.include_children is True @@ -5452,28 +6031,29 @@ def test_create_sink_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.CreateSinkRequest( - parent='parent_value', + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_sink), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_sink(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateSinkRequest( - parent='parent_value', + parent="parent_value", ) + def test_create_sink_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5492,7 +6072,9 @@ def test_create_sink_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.create_sink] = mock_rpc request = {} client.create_sink(request) @@ -5506,8 +6088,11 @@ def test_create_sink_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_create_sink_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -5521,12 +6106,17 @@ async def test_create_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_a wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.create_sink in client._client._transport._wrapped_methods + assert ( + client._client._transport.create_sink + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_sink] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.create_sink + ] = mock_rpc request = {} await client.create_sink(request) @@ -5540,8 +6130,11 @@ async def test_create_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateSinkRequest): +async def test_create_sink_async( + transport: str = "grpc_asyncio", request_type=logging_config.CreateSinkRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -5552,20 +6145,20 @@ async def test_create_sink_async(transport: str = 'grpc_asyncio', 
request_type=l request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_sink), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', - include_children=True, - )) + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) + ) response = await client.create_sink(request) # Establish that the underlying gRPC stub method was called. @@ -5576,13 +6169,13 @@ async def test_create_sink_async(transport: str = 'grpc_asyncio', request_type=l # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogSink) - assert response.name == 'name_value' - assert response.destination == 'destination_value' - assert response.filter == 'filter_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.description == "description_value" assert response.disabled is True assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == 'writer_identity_value' + assert response.writer_identity == "writer_identity_value" assert response.include_children is True @@ -5590,6 +6183,7 @@ async def test_create_sink_async(transport: str = 'grpc_asyncio', request_type=l async def test_create_sink_async_from_dict(): await test_create_sink_async(request_type=dict) + def test_create_sink_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -5599,12 +6193,10 @@ def test_create_sink_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.CreateSinkRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: call.return_value = logging_config.LogSink() client.create_sink(request) @@ -5616,9 +6208,9 @@ def test_create_sink_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -5631,13 +6223,13 @@ async def test_create_sink_field_headers_async(): # a field header. Set these to a non-empty value. 
request = logging_config.CreateSinkRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_sink), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink() + ) await client.create_sink(request) # Establish that the underlying gRPC stub method was called. @@ -5648,9 +6240,9 @@ async def test_create_sink_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_create_sink_flattened(): @@ -5659,16 +6251,14 @@ def test_create_sink_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.create_sink( - parent='parent_value', - sink=logging_config.LogSink(name='name_value'), + parent="parent_value", + sink=logging_config.LogSink(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -5676,10 +6266,10 @@ def test_create_sink_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].sink - mock_val = logging_config.LogSink(name='name_value') + mock_val = logging_config.LogSink(name="name_value") assert arg == mock_val @@ -5693,10 +6283,11 @@ def test_create_sink_flattened_error(): with pytest.raises(ValueError): client.create_sink( logging_config.CreateSinkRequest(), - parent='parent_value', - sink=logging_config.LogSink(name='name_value'), + parent="parent_value", + sink=logging_config.LogSink(name="name_value"), ) + @pytest.mark.asyncio async def test_create_sink_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -5704,18 +6295,18 @@ async def test_create_sink_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.create_sink( - parent='parent_value', - sink=logging_config.LogSink(name='name_value'), + parent="parent_value", + sink=logging_config.LogSink(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -5723,12 +6314,13 @@ async def test_create_sink_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].sink - mock_val = logging_config.LogSink(name='name_value') + mock_val = logging_config.LogSink(name="name_value") assert arg == mock_val + @pytest.mark.asyncio async def test_create_sink_flattened_error_async(): client = ConfigServiceV2AsyncClient( @@ -5740,16 +6332,19 @@ async def test_create_sink_flattened_error_async(): with pytest.raises(ValueError): await client.create_sink( logging_config.CreateSinkRequest(), - parent='parent_value', - sink=logging_config.LogSink(name='name_value'), + parent="parent_value", + sink=logging_config.LogSink(name="name_value"), ) -@pytest.mark.parametrize("request_type", [ - logging_config.UpdateSinkRequest, - dict, -]) -def test_update_sink(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateSinkRequest, + dict, + ], +) +def test_update_sink(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5760,18 +6355,16 @@ def test_update_sink(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", disabled=True, output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', + writer_identity="writer_identity_value", include_children=True, ) response = client.update_sink(request) @@ -5784,13 +6377,13 @@ def test_update_sink(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogSink) - assert response.name == 'name_value' - assert response.destination == 'destination_value' - assert response.filter == 'filter_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.description == "description_value" assert response.disabled is True assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == 'writer_identity_value' + assert response.writer_identity == "writer_identity_value" assert response.include_children is True @@ -5799,28 +6392,29 @@ def test_update_sink_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.UpdateSinkRequest( - sink_name='sink_name_value', + sink_name="sink_name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_sink), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_sink(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateSinkRequest( - sink_name='sink_name_value', + sink_name="sink_name_value", ) + def test_update_sink_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5839,7 +6433,9 @@ def test_update_sink_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.update_sink] = mock_rpc request = {} client.update_sink(request) @@ -5853,8 +6449,11 @@ def test_update_sink_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_update_sink_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -5868,12 +6467,17 @@ async def test_update_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_a wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.update_sink in client._client._transport._wrapped_methods + assert ( + client._client._transport.update_sink + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_sink] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.update_sink + ] = mock_rpc request = {} await client.update_sink(request) @@ -5887,8 +6491,11 @@ async def test_update_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateSinkRequest): +async def test_update_sink_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateSinkRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -5899,20 +6506,20 @@ async def test_update_sink_async(transport: str = 'grpc_asyncio', 
request_type=l request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_sink), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', - include_children=True, - )) + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) + ) response = await client.update_sink(request) # Establish that the underlying gRPC stub method was called. @@ -5923,13 +6530,13 @@ async def test_update_sink_async(transport: str = 'grpc_asyncio', request_type=l # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogSink) - assert response.name == 'name_value' - assert response.destination == 'destination_value' - assert response.filter == 'filter_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.description == "description_value" assert response.disabled is True assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == 'writer_identity_value' + assert response.writer_identity == "writer_identity_value" assert response.include_children is True @@ -5937,6 +6544,7 @@ async def test_update_sink_async(transport: str = 'grpc_asyncio', request_type=l async def test_update_sink_async_from_dict(): await test_update_sink_async(request_type=dict) + def test_update_sink_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -5946,12 +6554,10 @@ def test_update_sink_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateSinkRequest() - request.sink_name = 'sink_name_value' + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: call.return_value = logging_config.LogSink() client.update_sink(request) @@ -5963,9 +6569,9 @@ def test_update_sink_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'sink_name=sink_name_value', - ) in kw['metadata'] + "x-goog-request-params", + "sink_name=sink_name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -5978,13 +6584,13 @@ async def test_update_sink_field_headers_async(): # a field header. 
Set these to a non-empty value. request = logging_config.UpdateSinkRequest() - request.sink_name = 'sink_name_value' + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_sink), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink() + ) await client.update_sink(request) # Establish that the underlying gRPC stub method was called. @@ -5995,9 +6601,9 @@ async def test_update_sink_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'sink_name=sink_name_value', - ) in kw['metadata'] + "x-goog-request-params", + "sink_name=sink_name_value", + ) in kw["metadata"] def test_update_sink_flattened(): @@ -6006,17 +6612,15 @@ def test_update_sink_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.update_sink( - sink_name='sink_name_value', - sink=logging_config.LogSink(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + sink_name="sink_name_value", + sink=logging_config.LogSink(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -6024,13 +6628,13 @@ def test_update_sink_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].sink_name - mock_val = 'sink_name_value' + mock_val = "sink_name_value" assert arg == mock_val arg = args[0].sink - mock_val = logging_config.LogSink(name='name_value') + mock_val = logging_config.LogSink(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @@ -6044,11 +6648,12 @@ def test_update_sink_flattened_error(): with pytest.raises(ValueError): client.update_sink( logging_config.UpdateSinkRequest(), - sink_name='sink_name_value', - sink=logging_config.LogSink(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + sink_name="sink_name_value", + sink=logging_config.LogSink(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + @pytest.mark.asyncio async def test_update_sink_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -6056,19 +6661,19 @@ async def test_update_sink_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = logging_config.LogSink() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_sink( - sink_name='sink_name_value', - sink=logging_config.LogSink(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + sink_name="sink_name_value", + sink=logging_config.LogSink(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -6076,15 +6681,16 @@ async def test_update_sink_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].sink_name - mock_val = 'sink_name_value' + mock_val = "sink_name_value" assert arg == mock_val arg = args[0].sink - mock_val = logging_config.LogSink(name='name_value') + mock_val = logging_config.LogSink(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val + @pytest.mark.asyncio async def test_update_sink_flattened_error_async(): client = ConfigServiceV2AsyncClient( @@ -6096,17 +6702,20 @@ async def test_update_sink_flattened_error_async(): with pytest.raises(ValueError): await client.update_sink( logging_config.UpdateSinkRequest(), - sink_name='sink_name_value', - sink=logging_config.LogSink(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + sink_name="sink_name_value", + sink=logging_config.LogSink(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -@pytest.mark.parametrize("request_type", [ - logging_config.DeleteSinkRequest, - dict, -]) -def test_delete_sink(request_type, transport: str 
= 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.DeleteSinkRequest, + dict, + ], +) +def test_delete_sink(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6117,9 +6726,7 @@ def test_delete_sink(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None response = client.delete_sink(request) @@ -6139,28 +6746,29 @@ def test_delete_sink_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.DeleteSinkRequest( - sink_name='sink_name_value', + sink_name="sink_name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_sink), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.delete_sink(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteSinkRequest( - sink_name='sink_name_value', + sink_name="sink_name_value", ) + def test_delete_sink_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -6179,7 +6787,9 @@ def test_delete_sink_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.delete_sink] = mock_rpc request = {} client.delete_sink(request) @@ -6193,8 +6803,11 @@ def test_delete_sink_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_delete_sink_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -6208,12 +6821,17 @@ async def test_delete_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_a wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.delete_sink in client._client._transport._wrapped_methods + assert ( + client._client._transport.delete_sink + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_sink] = mock_rpc + 
client._client._transport._wrapped_methods[ + client._client._transport.delete_sink + ] = mock_rpc request = {} await client.delete_sink(request) @@ -6227,8 +6845,11 @@ async def test_delete_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteSinkRequest): +async def test_delete_sink_async( + transport: str = "grpc_asyncio", request_type=logging_config.DeleteSinkRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -6239,9 +6860,7 @@ async def test_delete_sink_async(transport: str = 'grpc_asyncio', request_type=l request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.delete_sink(request) @@ -6260,6 +6879,7 @@ async def test_delete_sink_async(transport: str = 'grpc_asyncio', request_type=l async def test_delete_sink_async_from_dict(): await test_delete_sink_async(request_type=dict) + def test_delete_sink_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -6269,12 +6889,10 @@ def test_delete_sink_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.DeleteSinkRequest() - request.sink_name = 'sink_name_value' + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: call.return_value = None client.delete_sink(request) @@ -6286,9 +6904,9 @@ def test_delete_sink_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'sink_name=sink_name_value', - ) in kw['metadata'] + "x-goog-request-params", + "sink_name=sink_name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -6301,12 +6919,10 @@ async def test_delete_sink_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.DeleteSinkRequest() - request.sink_name = 'sink_name_value' + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_sink(request) @@ -6318,9 +6934,9 @@ async def test_delete_sink_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'sink_name=sink_name_value', - ) in kw['metadata'] + "x-goog-request-params", + "sink_name=sink_name_value", + ) in kw["metadata"] def test_delete_sink_flattened(): @@ -6329,15 +6945,13 @@ def test_delete_sink_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.delete_sink( - sink_name='sink_name_value', + sink_name="sink_name_value", ) # Establish that the underlying call was made with the expected @@ -6345,7 +6959,7 @@ def test_delete_sink_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].sink_name - mock_val = 'sink_name_value' + mock_val = "sink_name_value" assert arg == mock_val @@ -6359,9 +6973,10 @@ def test_delete_sink_flattened_error(): with pytest.raises(ValueError): client.delete_sink( logging_config.DeleteSinkRequest(), - sink_name='sink_name_value', + sink_name="sink_name_value", ) + @pytest.mark.asyncio async def test_delete_sink_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -6369,9 +6984,7 @@ async def test_delete_sink_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -6379,7 +6992,7 @@ async def test_delete_sink_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.delete_sink( - sink_name='sink_name_value', + sink_name="sink_name_value", ) # Establish that the underlying call was made with the expected @@ -6387,9 +7000,10 @@ async def test_delete_sink_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].sink_name - mock_val = 'sink_name_value' + mock_val = "sink_name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_delete_sink_flattened_error_async(): client = ConfigServiceV2AsyncClient( @@ -6401,15 +7015,18 @@ async def test_delete_sink_flattened_error_async(): with pytest.raises(ValueError): await client.delete_sink( logging_config.DeleteSinkRequest(), - sink_name='sink_name_value', + sink_name="sink_name_value", ) -@pytest.mark.parametrize("request_type", [ - logging_config.CreateLinkRequest, - dict, -]) -def test_create_link(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.CreateLinkRequest, + dict, + ], +) +def test_create_link(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6420,11 +7037,9 @@ def test_create_link(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_link), - '__call__') as call: + with mock.patch.object(type(client.transport.create_link), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.create_link(request) # Establish that the underlying gRPC stub method was called. @@ -6442,30 +7057,31 @@ def test_create_link_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. 
client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.CreateLinkRequest( - parent='parent_value', - link_id='link_id_value', + parent="parent_value", + link_id="link_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_link), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.create_link), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_link(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateLinkRequest( - parent='parent_value', - link_id='link_id_value', + parent="parent_value", + link_id="link_id_value", ) + def test_create_link_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -6484,7 +7100,9 @@ def test_create_link_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.create_link] = mock_rpc request = {} client.create_link(request) @@ -6503,8 +7121,11 @@ def test_create_link_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_create_link_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -6518,12 +7139,17 @@ async def test_create_link_async_use_cached_wrapped_rpc(transport: str = "grpc_a wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.create_link in client._client._transport._wrapped_methods + assert ( + client._client._transport.create_link + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_link] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.create_link + ] = mock_rpc request = {} await client.create_link(request) @@ -6542,8 +7168,11 @@ async def test_create_link_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateLinkRequest): +async def test_create_link_async( + transport: str = "grpc_asyncio", request_type=logging_config.CreateLinkRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -6554,12 +7183,10 @@ async def test_create_link_async(transport: str = 'grpc_asyncio', 
request_type=l request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_link), - '__call__') as call: + with mock.patch.object(type(client.transport.create_link), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.create_link(request) @@ -6577,6 +7204,7 @@ async def test_create_link_async(transport: str = 'grpc_asyncio', request_type=l async def test_create_link_async_from_dict(): await test_create_link_async(request_type=dict) + def test_create_link_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -6586,13 +7214,11 @@ def test_create_link_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.CreateLinkRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_link), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.create_link), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.create_link(request) # Establish that the underlying gRPC stub method was called. @@ -6603,9 +7229,9 @@ def test_create_link_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -6618,13 +7244,13 @@ async def test_create_link_field_headers_async(): # a field header. Set these to a non-empty value. 
request = logging_config.CreateLinkRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_link), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + with mock.patch.object(type(client.transport.create_link), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.create_link(request) # Establish that the underlying gRPC stub method was called. @@ -6635,9 +7261,9 @@ async def test_create_link_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_create_link_flattened(): @@ -6646,17 +7272,15 @@ def test_create_link_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_link), - '__call__') as call: + with mock.patch.object(type(client.transport.create_link), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.create_link( - parent='parent_value', - link=logging_config.Link(name='name_value'), - link_id='link_id_value', + parent="parent_value", + link=logging_config.Link(name="name_value"), + link_id="link_id_value", ) # Establish that the underlying call was made with the expected @@ -6664,13 +7288,13 @@ def test_create_link_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].link - mock_val = logging_config.Link(name='name_value') + mock_val = logging_config.Link(name="name_value") assert arg == mock_val arg = args[0].link_id - mock_val = 'link_id_value' + mock_val = "link_id_value" assert arg == mock_val @@ -6684,11 +7308,12 @@ def test_create_link_flattened_error(): with pytest.raises(ValueError): client.create_link( logging_config.CreateLinkRequest(), - parent='parent_value', - link=logging_config.Link(name='name_value'), - link_id='link_id_value', + parent="parent_value", + link=logging_config.Link(name="name_value"), + link_id="link_id_value", ) + @pytest.mark.asyncio async def test_create_link_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -6696,21 +7321,19 @@ async def test_create_link_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_link), - '__call__') as call: + with mock.patch.object(type(client.transport.create_link), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.create_link( - parent='parent_value', - link=logging_config.Link(name='name_value'), - link_id='link_id_value', + parent="parent_value", + link=logging_config.Link(name="name_value"), + link_id="link_id_value", ) # Establish that the underlying call was made with the expected @@ -6718,15 +7341,16 @@ async def test_create_link_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].link - mock_val = logging_config.Link(name='name_value') + mock_val = logging_config.Link(name="name_value") assert arg == mock_val arg = args[0].link_id - mock_val = 'link_id_value' + mock_val = "link_id_value" assert arg == mock_val + @pytest.mark.asyncio async def test_create_link_flattened_error_async(): client = ConfigServiceV2AsyncClient( @@ -6738,17 +7362,20 @@ async def test_create_link_flattened_error_async(): with pytest.raises(ValueError): await client.create_link( logging_config.CreateLinkRequest(), - parent='parent_value', - link=logging_config.Link(name='name_value'), - link_id='link_id_value', + parent="parent_value", + link=logging_config.Link(name="name_value"), + link_id="link_id_value", ) -@pytest.mark.parametrize("request_type", [ - logging_config.DeleteLinkRequest, - dict, -]) -def test_delete_link(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.DeleteLinkRequest, + dict, + ], +) +def test_delete_link(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6759,11 +7386,9 @@ def test_delete_link(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_link), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.delete_link(request) # Establish that the underlying gRPC stub method was called. @@ -6781,28 +7406,29 @@ def test_delete_link_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.DeleteLinkRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_link), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.delete_link(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteLinkRequest( - name='name_value', + name="name_value", ) + def test_delete_link_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -6821,7 +7447,9 @@ def test_delete_link_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.delete_link] = mock_rpc request = {} client.delete_link(request) @@ -6840,8 +7468,11 @@ def test_delete_link_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_delete_link_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -6855,12 +7486,17 @@ async def test_delete_link_async_use_cached_wrapped_rpc(transport: str = "grpc_a wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.delete_link in client._client._transport._wrapped_methods + assert ( + client._client._transport.delete_link + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_link] = mock_rpc + client._client._transport._wrapped_methods[ + 
client._client._transport.delete_link + ] = mock_rpc request = {} await client.delete_link(request) @@ -6879,8 +7515,11 @@ async def test_delete_link_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteLinkRequest): +async def test_delete_link_async( + transport: str = "grpc_asyncio", request_type=logging_config.DeleteLinkRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -6891,12 +7530,10 @@ async def test_delete_link_async(transport: str = 'grpc_asyncio', request_type=l request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_link), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.delete_link(request) @@ -6914,6 +7551,7 @@ async def test_delete_link_async(transport: str = 'grpc_asyncio', request_type=l async def test_delete_link_async_from_dict(): await test_delete_link_async(request_type=dict) + def test_delete_link_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -6923,13 +7561,11 @@ def test_delete_link_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.DeleteLinkRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_link), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.delete_link(request) # Establish that the underlying gRPC stub method was called. @@ -6940,9 +7576,9 @@ def test_delete_link_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -6955,13 +7591,13 @@ async def test_delete_link_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.DeleteLinkRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_link), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.delete_link(request) # Establish that the underlying gRPC stub method was called. @@ -6972,9 +7608,9 @@ async def test_delete_link_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_delete_link_flattened(): @@ -6983,15 +7619,13 @@ def test_delete_link_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_link), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_link( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -6999,7 +7633,7 @@ def test_delete_link_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -7013,9 +7647,10 @@ def test_delete_link_flattened_error(): with pytest.raises(ValueError): client.delete_link( logging_config.DeleteLinkRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_delete_link_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -7023,19 +7658,17 @@ async def test_delete_link_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_link), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.delete_link( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -7043,9 +7676,10 @@ async def test_delete_link_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_delete_link_flattened_error_async(): client = ConfigServiceV2AsyncClient( @@ -7057,15 +7691,18 @@ async def test_delete_link_flattened_error_async(): with pytest.raises(ValueError): await client.delete_link( logging_config.DeleteLinkRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - logging_config.ListLinksRequest, - dict, -]) -def test_list_links(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.ListLinksRequest, + dict, + ], +) +def test_list_links(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7076,12 +7713,10 @@ def test_list_links(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_links), - '__call__') as call: + with mock.patch.object(type(client.transport.list_links), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListLinksResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.list_links(request) @@ -7093,7 +7728,7 @@ def test_list_links(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListLinksPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_list_links_non_empty_request_with_auto_populated_field(): @@ -7101,30 +7736,31 @@ def test_list_links_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.ListLinksRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_links), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.list_links), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_links(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListLinksRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) + def test_list_links_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7143,7 +7779,9 @@ def test_list_links_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.list_links] = mock_rpc request = {} client.list_links(request) @@ -7157,6 +7795,7 @@ def test_list_links_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_list_links_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7172,12 +7811,17 @@ async def test_list_links_async_use_cached_wrapped_rpc(transport: str = "grpc_as wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_links in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_links + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_links] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_links + ] = mock_rpc request = {} await client.list_links(request) @@ -7191,8 +7835,11 @@ async def test_list_links_async_use_cached_wrapped_rpc(transport: str = "grpc_as assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_links_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListLinksRequest): +async def test_list_links_async( + transport: str = "grpc_asyncio", request_type=logging_config.ListLinksRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -7203,13 +7850,13 @@ async def test_list_links_async(transport: str = 'grpc_asyncio', request_type=lo request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_links), - '__call__') as call: + with mock.patch.object(type(client.transport.list_links), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListLinksResponse( + next_page_token="next_page_token_value", + ) + ) response = await client.list_links(request) # Establish that the underlying gRPC stub method was called. @@ -7220,13 +7867,14 @@ async def test_list_links_async(transport: str = 'grpc_asyncio', request_type=lo # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLinksAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_list_links_async_from_dict(): await test_list_links_async(request_type=dict) + def test_list_links_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -7236,12 +7884,10 @@ def test_list_links_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.ListLinksRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_links), - '__call__') as call: + with mock.patch.object(type(client.transport.list_links), "__call__") as call: call.return_value = logging_config.ListLinksResponse() client.list_links(request) @@ -7253,9 +7899,9 @@ def test_list_links_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -7268,13 +7914,13 @@ async def test_list_links_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.ListLinksRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_links), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse()) + with mock.patch.object(type(client.transport.list_links), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListLinksResponse() + ) await client.list_links(request) # Establish that the underlying gRPC stub method was called. @@ -7285,9 +7931,9 @@ async def test_list_links_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_list_links_flattened(): @@ -7296,15 +7942,13 @@ def test_list_links_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_links), - '__call__') as call: + with mock.patch.object(type(client.transport.list_links), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListLinksResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.list_links( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -7312,7 +7956,7 @@ def test_list_links_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -7326,9 +7970,10 @@ def test_list_links_flattened_error(): with pytest.raises(ValueError): client.list_links( logging_config.ListLinksRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test_list_links_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -7336,17 +7981,17 @@ async def test_list_links_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_links), - '__call__') as call: + with mock.patch.object(type(client.transport.list_links), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListLinksResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListLinksResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.list_links( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -7354,9 +7999,10 @@ async def test_list_links_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test_list_links_flattened_error_async(): client = ConfigServiceV2AsyncClient( @@ -7368,7 +8014,7 @@ async def test_list_links_flattened_error_async(): with pytest.raises(ValueError): await client.list_links( logging_config.ListLinksRequest(), - parent='parent_value', + parent="parent_value", ) @@ -7379,9 +8025,7 @@ def test_list_links_pager(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_links), - '__call__') as call: + with mock.patch.object(type(client.transport.list_links), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListLinksResponse( @@ -7390,17 +8034,17 @@ def test_list_links_pager(transport_name: str = "grpc"): logging_config.Link(), logging_config.Link(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListLinksResponse( links=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListLinksResponse( links=[ logging_config.Link(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListLinksResponse( links=[ @@ -7415,9 +8059,7 @@ def test_list_links_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_links(request={}, retry=retry, timeout=timeout) @@ -7427,8 +8069,9 @@ def test_list_links_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, logging_config.Link) - for i in results) + assert all(isinstance(i, logging_config.Link) for i in results) + + def test_list_links_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -7436,9 +8079,7 @@ def test_list_links_pages(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_links), - '__call__') as call: + with mock.patch.object(type(client.transport.list_links), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListLinksResponse( @@ -7447,17 +8088,17 @@ def test_list_links_pages(transport_name: str = "grpc"): logging_config.Link(), logging_config.Link(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListLinksResponse( links=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListLinksResponse( links=[ logging_config.Link(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListLinksResponse( links=[ @@ -7468,9 +8109,10 @@ def test_list_links_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.list_links(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_list_links_async_pager(): client = ConfigServiceV2AsyncClient( @@ -7479,8 +8121,8 @@ async def test_list_links_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_links), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_links), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListLinksResponse( @@ -7489,17 +8131,17 @@ async def test_list_links_async_pager(): logging_config.Link(), logging_config.Link(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListLinksResponse( links=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListLinksResponse( links=[ logging_config.Link(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListLinksResponse( links=[ @@ -7509,15 +8151,16 @@ async def test_list_links_async_pager(): ), RuntimeError, ) - async_pager = await client.list_links(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.list_links( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, logging_config.Link) - for i in responses) + assert all(isinstance(i, logging_config.Link) for i in responses) @pytest.mark.asyncio @@ -7528,8 +8171,8 @@ async def test_list_links_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_links), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_links), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListLinksResponse( @@ -7538,17 +8181,17 @@ async def test_list_links_async_pages(): logging_config.Link(), logging_config.Link(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListLinksResponse( links=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListLinksResponse( links=[ logging_config.Link(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListLinksResponse( links=[ @@ -7561,18 +8204,22 @@ async def test_list_links_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.list_links(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - logging_config.GetLinkRequest, - dict, -]) -def test_get_link(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetLinkRequest, + dict, + ], +) +def test_get_link(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7583,13 +8230,11 @@ def test_get_link(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_link), - '__call__') as call: + with mock.patch.object(type(client.transport.get_link), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = logging_config.Link( - name='name_value', - description='description_value', + name="name_value", + description="description_value", lifecycle_state=logging_config.LifecycleState.ACTIVE, ) response = client.get_link(request) @@ -7602,8 +8247,8 @@ def test_get_link(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.Link) - assert response.name == 'name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.description == "description_value" assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE @@ -7612,28 +8257,29 @@ def test_get_link_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.GetLinkRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_link), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.get_link), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.get_link(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetLinkRequest( - name='name_value', + name="name_value", ) + def test_get_link_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7652,7 +8298,9 @@ def test_get_link_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.get_link] = mock_rpc request = {} client.get_link(request) @@ -7666,6 +8314,7 @@ def test_get_link_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_get_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7681,12 +8330,17 @@ async def test_get_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_link in client._client._transport._wrapped_methods + assert ( + client._client._transport.get_link + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_link] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.get_link + ] = mock_rpc request = {} await client.get_link(request) @@ -7700,8 +8354,11 @@ async def test_get_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + 
@pytest.mark.asyncio -async def test_get_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetLinkRequest): +async def test_get_link_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetLinkRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -7712,15 +8369,15 @@ async def test_get_link_async(transport: str = 'grpc_asyncio', request_type=logg request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_link), - '__call__') as call: + with mock.patch.object(type(client.transport.get_link), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link( - name='name_value', - description='description_value', - lifecycle_state=logging_config.LifecycleState.ACTIVE, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Link( + name="name_value", + description="description_value", + lifecycle_state=logging_config.LifecycleState.ACTIVE, + ) + ) response = await client.get_link(request) # Establish that the underlying gRPC stub method was called. @@ -7731,8 +8388,8 @@ async def test_get_link_async(transport: str = 'grpc_asyncio', request_type=logg # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.Link) - assert response.name == 'name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.description == "description_value" assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE @@ -7740,6 +8397,7 @@ async def test_get_link_async(transport: str = 'grpc_asyncio', request_type=logg async def test_get_link_async_from_dict(): await test_get_link_async(request_type=dict) + def test_get_link_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -7749,12 +8407,10 @@ def test_get_link_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetLinkRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_link), - '__call__') as call: + with mock.patch.object(type(client.transport.get_link), "__call__") as call: call.return_value = logging_config.Link() client.get_link(request) @@ -7766,9 +8422,9 @@ def test_get_link_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -7781,12 +8437,10 @@ async def test_get_link_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetLinkRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_link), - '__call__') as call: + with mock.patch.object(type(client.transport.get_link), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link()) await client.get_link(request) @@ -7798,9 +8452,9 @@ async def test_get_link_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_get_link_flattened(): @@ -7809,15 +8463,13 @@ def test_get_link_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_link), - '__call__') as call: + with mock.patch.object(type(client.transport.get_link), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.Link() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_link( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -7825,7 +8477,7 @@ def test_get_link_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -7839,9 +8491,10 @@ def test_get_link_flattened_error(): with pytest.raises(ValueError): client.get_link( logging_config.GetLinkRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_get_link_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -7849,9 +8502,7 @@ async def test_get_link_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_link), - '__call__') as call: + with mock.patch.object(type(client.transport.get_link), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.Link() @@ -7859,7 +8510,7 @@ async def test_get_link_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_link( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -7867,9 +8518,10 @@ async def test_get_link_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_get_link_flattened_error_async(): client = ConfigServiceV2AsyncClient( @@ -7881,15 +8533,18 @@ async def test_get_link_flattened_error_async(): with pytest.raises(ValueError): await client.get_link( logging_config.GetLinkRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - logging_config.ListExclusionsRequest, - dict, -]) -def test_list_exclusions(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.ListExclusionsRequest, + dict, + ], +) +def test_list_exclusions(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7900,12 +8555,10 @@ def test_list_exclusions(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = logging_config.ListExclusionsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.list_exclusions(request) @@ -7917,7 +8570,7 @@ def test_list_exclusions(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListExclusionsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_list_exclusions_non_empty_request_with_auto_populated_field(): @@ -7925,30 +8578,31 @@ def test_list_exclusions_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.ListExclusionsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_exclusions(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListExclusionsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) + def test_list_exclusions_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7967,7 +8621,9 @@ def test_list_exclusions_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.list_exclusions] = mock_rpc request = {} client.list_exclusions(request) @@ -7981,8 +8637,11 @@ def test_list_exclusions_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_exclusions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_exclusions_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -7996,12 +8655,17 @@ async def test_list_exclusions_async_use_cached_wrapped_rpc(transport: str = "gr wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_exclusions in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_exclusions + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - 
client._client._transport._wrapped_methods[client._client._transport.list_exclusions] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_exclusions + ] = mock_rpc request = {} await client.list_exclusions(request) @@ -8015,8 +8679,11 @@ async def test_list_exclusions_async_use_cached_wrapped_rpc(transport: str = "gr assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_exclusions_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListExclusionsRequest): +async def test_list_exclusions_async( + transport: str = "grpc_asyncio", request_type=logging_config.ListExclusionsRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -8027,13 +8694,13 @@ async def test_list_exclusions_async(transport: str = 'grpc_asyncio', request_ty request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListExclusionsResponse( + next_page_token="next_page_token_value", + ) + ) response = await client.list_exclusions(request) # Establish that the underlying gRPC stub method was called. @@ -8044,13 +8711,14 @@ async def test_list_exclusions_async(transport: str = 'grpc_asyncio', request_ty # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListExclusionsAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_list_exclusions_async_from_dict(): await test_list_exclusions_async(request_type=dict) + def test_list_exclusions_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -8060,12 +8728,10 @@ def test_list_exclusions_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.ListExclusionsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: call.return_value = logging_config.ListExclusionsResponse() client.list_exclusions(request) @@ -8077,9 +8743,9 @@ def test_list_exclusions_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -8092,13 +8758,13 @@ async def test_list_exclusions_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.ListExclusionsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse()) + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListExclusionsResponse() + ) await client.list_exclusions(request) # Establish that the underlying gRPC stub method was called. @@ -8109,9 +8775,9 @@ async def test_list_exclusions_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_list_exclusions_flattened(): @@ -8120,15 +8786,13 @@ def test_list_exclusions_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListExclusionsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.list_exclusions( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -8136,7 +8800,7 @@ def test_list_exclusions_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -8150,9 +8814,10 @@ def test_list_exclusions_flattened_error(): with pytest.raises(ValueError): client.list_exclusions( logging_config.ListExclusionsRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test_list_exclusions_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -8160,17 +8825,17 @@ async def test_list_exclusions_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListExclusionsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListExclusionsResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.list_exclusions( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -8178,9 +8843,10 @@ async def test_list_exclusions_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test_list_exclusions_flattened_error_async(): client = ConfigServiceV2AsyncClient( @@ -8192,7 +8858,7 @@ async def test_list_exclusions_flattened_error_async(): with pytest.raises(ValueError): await client.list_exclusions( logging_config.ListExclusionsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -8203,9 +8869,7 @@ def test_list_exclusions_pager(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListExclusionsResponse( @@ -8214,17 +8878,17 @@ def test_list_exclusions_pager(transport_name: str = "grpc"): logging_config.LogExclusion(), logging_config.LogExclusion(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListExclusionsResponse( exclusions=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListExclusionsResponse( exclusions=[ logging_config.LogExclusion(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListExclusionsResponse( exclusions=[ @@ -8239,9 +8903,7 @@ def test_list_exclusions_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_exclusions(request={}, retry=retry, timeout=timeout) @@ -8251,8 +8913,9 @@ def test_list_exclusions_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, logging_config.LogExclusion) - for i in results) + assert all(isinstance(i, logging_config.LogExclusion) for i in results) + + def test_list_exclusions_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -8260,9 +8923,7 @@ def test_list_exclusions_pages(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListExclusionsResponse( @@ -8271,17 +8932,17 @@ def test_list_exclusions_pages(transport_name: str = "grpc"): logging_config.LogExclusion(), logging_config.LogExclusion(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListExclusionsResponse( exclusions=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListExclusionsResponse( exclusions=[ logging_config.LogExclusion(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListExclusionsResponse( exclusions=[ @@ -8292,9 +8953,10 @@ def test_list_exclusions_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.list_exclusions(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_list_exclusions_async_pager(): client = ConfigServiceV2AsyncClient( @@ -8303,8 +8965,8 @@ async def test_list_exclusions_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_exclusions), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_exclusions), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListExclusionsResponse( @@ -8313,17 +8975,17 @@ async def test_list_exclusions_async_pager(): logging_config.LogExclusion(), logging_config.LogExclusion(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListExclusionsResponse( exclusions=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListExclusionsResponse( exclusions=[ logging_config.LogExclusion(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListExclusionsResponse( exclusions=[ @@ -8333,15 +8995,16 @@ async def test_list_exclusions_async_pager(): ), RuntimeError, ) - async_pager = await client.list_exclusions(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.list_exclusions( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, logging_config.LogExclusion) - for i in responses) + assert all(isinstance(i, logging_config.LogExclusion) for i in responses) @pytest.mark.asyncio @@ -8352,8 +9015,8 @@ async def test_list_exclusions_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_exclusions), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_exclusions), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListExclusionsResponse( @@ -8362,17 +9025,17 @@ async def test_list_exclusions_async_pages(): logging_config.LogExclusion(), logging_config.LogExclusion(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListExclusionsResponse( exclusions=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListExclusionsResponse( exclusions=[ logging_config.LogExclusion(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListExclusionsResponse( exclusions=[ @@ -8385,18 +9048,22 @@ async def test_list_exclusions_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.list_exclusions(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - logging_config.GetExclusionRequest, - dict, -]) -def test_get_exclusion(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetExclusionRequest, + dict, + ], +) +def test_get_exclusion(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8407,14 +9074,12 @@ def test_get_exclusion(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", disabled=True, ) response = client.get_exclusion(request) @@ -8427,9 +9092,9 @@ def test_get_exclusion(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" assert response.disabled is True @@ -8438,28 +9103,29 @@ def test_get_exclusion_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.GetExclusionRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_exclusion), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.get_exclusion(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetExclusionRequest( - name='name_value', + name="name_value", ) + def test_get_exclusion_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -8478,7 +9144,9 @@ def test_get_exclusion_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.get_exclusion] = mock_rpc request = {} client.get_exclusion(request) @@ -8492,8 +9160,11 @@ def test_get_exclusion_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_get_exclusion_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -8507,12 +9178,17 @@ async def test_get_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_exclusion in client._client._transport._wrapped_methods + assert ( + client._client._transport.get_exclusion + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_exclusion] = mock_rpc + 
client._client._transport._wrapped_methods[ + client._client._transport.get_exclusion + ] = mock_rpc request = {} await client.get_exclusion(request) @@ -8526,8 +9202,11 @@ async def test_get_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetExclusionRequest): +async def test_get_exclusion_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetExclusionRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -8538,16 +9217,16 @@ async def test_get_exclusion_async(transport: str = 'grpc_asyncio', request_type request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + ) response = await client.get_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -8558,9 +9237,9 @@ async def test_get_exclusion_async(transport: str = 'grpc_asyncio', request_type # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" assert response.disabled is True @@ -8568,6 +9247,7 @@ async def test_get_exclusion_async(transport: str = 'grpc_asyncio', request_type async def test_get_exclusion_async_from_dict(): await test_get_exclusion_async(request_type=dict) + def test_get_exclusion_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -8577,12 +9257,10 @@ def test_get_exclusion_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetExclusionRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: call.return_value = logging_config.LogExclusion() client.get_exclusion(request) @@ -8594,9 +9272,9 @@ def test_get_exclusion_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -8609,13 +9287,13 @@ async def test_get_exclusion_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetExclusionRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_exclusion), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion() + ) await client.get_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -8626,9 +9304,9 @@ async def test_get_exclusion_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_get_exclusion_flattened(): @@ -8637,15 +9315,13 @@ def test_get_exclusion_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.get_exclusion( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -8653,7 +9329,7 @@ def test_get_exclusion_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -8667,9 +9343,10 @@ def test_get_exclusion_flattened_error(): with pytest.raises(ValueError): client.get_exclusion( logging_config.GetExclusionRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_get_exclusion_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -8677,17 +9354,17 @@ async def test_get_exclusion_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.get_exclusion( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -8695,9 +9372,10 @@ async def test_get_exclusion_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_get_exclusion_flattened_error_async(): client = ConfigServiceV2AsyncClient( @@ -8709,15 +9387,18 @@ async def test_get_exclusion_flattened_error_async(): with pytest.raises(ValueError): await client.get_exclusion( logging_config.GetExclusionRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - logging_config.CreateExclusionRequest, - dict, -]) -def test_create_exclusion(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.CreateExclusionRequest, + dict, + ], +) +def test_create_exclusion(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8728,14 +9409,12 @@ def test_create_exclusion(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", disabled=True, ) response = client.create_exclusion(request) @@ -8748,9 +9427,9 @@ def test_create_exclusion(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" assert response.disabled is True @@ -8759,28 +9438,29 @@ def test_create_exclusion_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.CreateExclusionRequest( - parent='parent_value', + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_exclusion), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.create_exclusion(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateExclusionRequest( - parent='parent_value', + parent="parent_value", ) + def test_create_exclusion_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -8799,8 +9479,12 @@ def test_create_exclusion_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_exclusion] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_exclusion] = ( + mock_rpc + ) request = {} client.create_exclusion(request) @@ -8813,8 +9497,11 @@ def test_create_exclusion_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_create_exclusion_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -8828,12 +9515,17 @@ async def test_create_exclusion_async_use_cached_wrapped_rpc(transport: str = "g wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.create_exclusion in client._client._transport._wrapped_methods + assert ( + client._client._transport.create_exclusion + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = 
mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_exclusion] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.create_exclusion + ] = mock_rpc request = {} await client.create_exclusion(request) @@ -8847,8 +9539,11 @@ async def test_create_exclusion_async_use_cached_wrapped_rpc(transport: str = "g assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateExclusionRequest): +async def test_create_exclusion_async( + transport: str = "grpc_asyncio", request_type=logging_config.CreateExclusionRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -8859,16 +9554,16 @@ async def test_create_exclusion_async(transport: str = 'grpc_asyncio', request_t request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + ) response = await client.create_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -8879,9 +9574,9 @@ async def test_create_exclusion_async(transport: str = 'grpc_asyncio', request_t # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" assert response.disabled is True @@ -8889,6 +9584,7 @@ async def test_create_exclusion_async(transport: str = 'grpc_asyncio', request_t async def test_create_exclusion_async_from_dict(): await test_create_exclusion_async(request_type=dict) + def test_create_exclusion_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -8898,12 +9594,10 @@ def test_create_exclusion_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.CreateExclusionRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: call.return_value = logging_config.LogExclusion() client.create_exclusion(request) @@ -8915,9 +9609,9 @@ def test_create_exclusion_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -8930,13 +9624,13 @@ async def test_create_exclusion_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.CreateExclusionRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_exclusion), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion() + ) await client.create_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -8947,9 +9641,9 @@ async def test_create_exclusion_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_create_exclusion_flattened(): @@ -8958,16 +9652,14 @@ def test_create_exclusion_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.create_exclusion( - parent='parent_value', - exclusion=logging_config.LogExclusion(name='name_value'), + parent="parent_value", + exclusion=logging_config.LogExclusion(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -8975,10 +9667,10 @@ def test_create_exclusion_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].exclusion - mock_val = logging_config.LogExclusion(name='name_value') + mock_val = logging_config.LogExclusion(name="name_value") assert arg == mock_val @@ -8992,10 +9684,11 @@ def test_create_exclusion_flattened_error(): with pytest.raises(ValueError): client.create_exclusion( logging_config.CreateExclusionRequest(), - parent='parent_value', - exclusion=logging_config.LogExclusion(name='name_value'), + parent="parent_value", + exclusion=logging_config.LogExclusion(name="name_value"), ) + @pytest.mark.asyncio async def test_create_exclusion_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -9003,18 +9696,18 @@ async def test_create_exclusion_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.create_exclusion( - parent='parent_value', - exclusion=logging_config.LogExclusion(name='name_value'), + parent="parent_value", + exclusion=logging_config.LogExclusion(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -9022,12 +9715,13 @@ async def test_create_exclusion_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].exclusion - mock_val = logging_config.LogExclusion(name='name_value') + mock_val = logging_config.LogExclusion(name="name_value") assert arg == mock_val + @pytest.mark.asyncio async def test_create_exclusion_flattened_error_async(): client = ConfigServiceV2AsyncClient( @@ -9039,16 +9733,19 @@ async def test_create_exclusion_flattened_error_async(): with pytest.raises(ValueError): await client.create_exclusion( logging_config.CreateExclusionRequest(), - parent='parent_value', - exclusion=logging_config.LogExclusion(name='name_value'), + parent="parent_value", + exclusion=logging_config.LogExclusion(name="name_value"), ) -@pytest.mark.parametrize("request_type", [ - logging_config.UpdateExclusionRequest, - dict, -]) -def test_update_exclusion(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateExclusionRequest, + dict, + ], +) +def test_update_exclusion(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9059,14 +9756,12 @@ def test_update_exclusion(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", disabled=True, ) response = client.update_exclusion(request) @@ -9079,9 +9774,9 @@ def test_update_exclusion(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" assert response.disabled is True @@ -9090,28 +9785,29 @@ def test_update_exclusion_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.UpdateExclusionRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_exclusion), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_exclusion(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateExclusionRequest( - name='name_value', + name="name_value", ) + def test_update_exclusion_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -9130,8 +9826,12 @@ def test_update_exclusion_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_exclusion] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.update_exclusion] = ( + mock_rpc + ) request = {} client.update_exclusion(request) @@ -9144,8 +9844,11 @@ def test_update_exclusion_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_update_exclusion_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -9159,12 +9862,17 @@ async def test_update_exclusion_async_use_cached_wrapped_rpc(transport: str = "g wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.update_exclusion in client._client._transport._wrapped_methods + assert ( + client._client._transport.update_exclusion + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_exclusion] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.update_exclusion + ] = mock_rpc request = {} await client.update_exclusion(request) @@ -9178,8 +9886,11 @@ async def test_update_exclusion_async_use_cached_wrapped_rpc(transport: str = "g assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateExclusionRequest): +async def test_update_exclusion_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateExclusionRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ 
-9190,16 +9901,16 @@ async def test_update_exclusion_async(transport: str = 'grpc_asyncio', request_t request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + ) response = await client.update_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -9210,9 +9921,9 @@ async def test_update_exclusion_async(transport: str = 'grpc_asyncio', request_t # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" assert response.disabled is True @@ -9220,6 +9931,7 @@ async def test_update_exclusion_async(transport: str = 'grpc_asyncio', request_t async def test_update_exclusion_async_from_dict(): await test_update_exclusion_async(request_type=dict) + def test_update_exclusion_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -9229,12 +9941,10 @@ def test_update_exclusion_field_headers(): # a field header. Set these to a non-empty value. 
request = logging_config.UpdateExclusionRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: call.return_value = logging_config.LogExclusion() client.update_exclusion(request) @@ -9246,9 +9956,9 @@ def test_update_exclusion_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -9261,13 +9971,13 @@ async def test_update_exclusion_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateExclusionRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_exclusion), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion() + ) await client.update_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -9278,9 +9988,9 @@ async def test_update_exclusion_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_update_exclusion_flattened(): @@ -9289,17 +9999,15 @@ def test_update_exclusion_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_exclusion( - name='name_value', - exclusion=logging_config.LogExclusion(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + name="name_value", + exclusion=logging_config.LogExclusion(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -9307,13 +10015,13 @@ def test_update_exclusion_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].exclusion - mock_val = logging_config.LogExclusion(name='name_value') + mock_val = logging_config.LogExclusion(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @@ -9327,11 +10035,12 @@ def test_update_exclusion_flattened_error(): with pytest.raises(ValueError): client.update_exclusion( logging_config.UpdateExclusionRequest(), - name='name_value', - exclusion=logging_config.LogExclusion(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + name="name_value", + exclusion=logging_config.LogExclusion(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + @pytest.mark.asyncio async def test_update_exclusion_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -9339,19 +10048,19 @@ async def test_update_exclusion_flattened_async(): ) # Mock the actual 
call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_exclusion( - name='name_value', - exclusion=logging_config.LogExclusion(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + name="name_value", + exclusion=logging_config.LogExclusion(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -9359,15 +10068,16 @@ async def test_update_exclusion_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].exclusion - mock_val = logging_config.LogExclusion(name='name_value') + mock_val = logging_config.LogExclusion(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val + @pytest.mark.asyncio async def test_update_exclusion_flattened_error_async(): client = ConfigServiceV2AsyncClient( @@ -9379,17 +10089,20 @@ async def test_update_exclusion_flattened_error_async(): with pytest.raises(ValueError): await client.update_exclusion( logging_config.UpdateExclusionRequest(), - name='name_value', - exclusion=logging_config.LogExclusion(name='name_value'), - 
update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + name="name_value", + exclusion=logging_config.LogExclusion(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -@pytest.mark.parametrize("request_type", [ - logging_config.DeleteExclusionRequest, - dict, -]) -def test_delete_exclusion(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.DeleteExclusionRequest, + dict, + ], +) +def test_delete_exclusion(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9400,9 +10113,7 @@ def test_delete_exclusion(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None response = client.delete_exclusion(request) @@ -9422,28 +10133,29 @@ def test_delete_exclusion_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.DeleteExclusionRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_exclusion), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_exclusion(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteExclusionRequest( - name='name_value', + name="name_value", ) + def test_delete_exclusion_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -9462,8 +10174,12 @@ def test_delete_exclusion_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_exclusion] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.delete_exclusion] = ( + mock_rpc + ) request = {} client.delete_exclusion(request) @@ -9476,8 +10192,11 @@ def test_delete_exclusion_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_delete_exclusion_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -9491,12 +10210,17 @@ async def test_delete_exclusion_async_use_cached_wrapped_rpc(transport: str = "g wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.delete_exclusion in client._client._transport._wrapped_methods + assert ( + client._client._transport.delete_exclusion + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_exclusion] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.delete_exclusion + ] = mock_rpc request = {} await client.delete_exclusion(request) @@ -9510,8 +10234,11 @@ async def test_delete_exclusion_async_use_cached_wrapped_rpc(transport: str = "g assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteExclusionRequest): +async def test_delete_exclusion_async( + transport: str = "grpc_asyncio", request_type=logging_config.DeleteExclusionRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ 
-9522,9 +10249,7 @@ async def test_delete_exclusion_async(transport: str = 'grpc_asyncio', request_t request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.delete_exclusion(request) @@ -9543,6 +10268,7 @@ async def test_delete_exclusion_async(transport: str = 'grpc_asyncio', request_t async def test_delete_exclusion_async_from_dict(): await test_delete_exclusion_async(request_type=dict) + def test_delete_exclusion_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -9552,12 +10278,10 @@ def test_delete_exclusion_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.DeleteExclusionRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: call.return_value = None client.delete_exclusion(request) @@ -9569,9 +10293,9 @@ def test_delete_exclusion_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -9584,12 +10308,10 @@ async def test_delete_exclusion_field_headers_async(): # a field header. Set these to a non-empty value. 
request = logging_config.DeleteExclusionRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_exclusion(request) @@ -9601,9 +10323,9 @@ async def test_delete_exclusion_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_delete_exclusion_flattened(): @@ -9612,15 +10334,13 @@ def test_delete_exclusion_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.delete_exclusion( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -9628,7 +10348,7 @@ def test_delete_exclusion_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -9642,9 +10362,10 @@ def test_delete_exclusion_flattened_error(): with pytest.raises(ValueError): client.delete_exclusion( logging_config.DeleteExclusionRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_delete_exclusion_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -9652,9 +10373,7 @@ async def test_delete_exclusion_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -9662,7 +10381,7 @@ async def test_delete_exclusion_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.delete_exclusion( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -9670,9 +10389,10 @@ async def test_delete_exclusion_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_delete_exclusion_flattened_error_async(): client = ConfigServiceV2AsyncClient( @@ -9684,15 +10404,18 @@ async def test_delete_exclusion_flattened_error_async(): with pytest.raises(ValueError): await client.delete_exclusion( logging_config.DeleteExclusionRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - logging_config.GetCmekSettingsRequest, - dict, -]) -def test_get_cmek_settings(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetCmekSettingsRequest, + dict, + ], +) +def test_get_cmek_settings(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9704,14 +10427,14 @@ def test_get_cmek_settings(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_cmek_settings), - '__call__') as call: + type(client.transport.get_cmek_settings), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = logging_config.CmekSettings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_key_version_name='kms_key_version_name_value', - service_account_id='service_account_id_value', + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", ) response = client.get_cmek_settings(request) @@ -9723,10 +10446,10 @@ def test_get_cmek_settings(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.CmekSettings) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.kms_key_version_name == 'kms_key_version_name_value' - assert response.service_account_id == 'service_account_id_value' + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_key_version_name == "kms_key_version_name_value" + assert response.service_account_id == "service_account_id_value" def test_get_cmek_settings_non_empty_request_with_auto_populated_field(): @@ -9734,28 +10457,31 @@ def test_get_cmek_settings_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.GetCmekSettingsRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_cmek_settings), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ type(client.transport.get_cmek_settings), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_cmek_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetCmekSettingsRequest( - name='name_value', + name="name_value", ) + def test_get_cmek_settings_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -9774,8 +10500,12 @@ def test_get_cmek_settings_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_cmek_settings] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_cmek_settings] = ( + mock_rpc + ) request = {} client.get_cmek_settings(request) @@ -9788,8 +10518,11 @@ def test_get_cmek_settings_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_cmek_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_get_cmek_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -9803,12 +10536,17 @@ async def test_get_cmek_settings_async_use_cached_wrapped_rpc(transport: str = " wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_cmek_settings in client._client._transport._wrapped_methods + assert ( + 
client._client._transport.get_cmek_settings + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_cmek_settings] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.get_cmek_settings + ] = mock_rpc request = {} await client.get_cmek_settings(request) @@ -9822,8 +10560,11 @@ async def test_get_cmek_settings_async_use_cached_wrapped_rpc(transport: str = " assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetCmekSettingsRequest): +async def test_get_cmek_settings_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetCmekSettingsRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -9835,15 +10576,17 @@ async def test_get_cmek_settings_async(transport: str = 'grpc_asyncio', request_ # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_cmek_settings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_key_version_name='kms_key_version_name_value', - service_account_id='service_account_id_value', - )) + type(client.transport.get_cmek_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.CmekSettings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", + ) + ) response = await client.get_cmek_settings(request) # Establish that the underlying gRPC stub method was called. @@ -9854,16 +10597,17 @@ async def test_get_cmek_settings_async(transport: str = 'grpc_asyncio', request_ # Establish that the response is the type that we expect. assert isinstance(response, logging_config.CmekSettings) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.kms_key_version_name == 'kms_key_version_name_value' - assert response.service_account_id == 'service_account_id_value' + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_key_version_name == "kms_key_version_name_value" + assert response.service_account_id == "service_account_id_value" @pytest.mark.asyncio async def test_get_cmek_settings_async_from_dict(): await test_get_cmek_settings_async(request_type=dict) + def test_get_cmek_settings_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -9873,12 +10617,12 @@ def test_get_cmek_settings_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetCmekSettingsRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_cmek_settings), - '__call__') as call: + type(client.transport.get_cmek_settings), "__call__" + ) as call: call.return_value = logging_config.CmekSettings() client.get_cmek_settings(request) @@ -9890,9 +10634,9 @@ def test_get_cmek_settings_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -9905,13 +10649,15 @@ async def test_get_cmek_settings_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetCmekSettingsRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_cmek_settings), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings()) + type(client.transport.get_cmek_settings), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.CmekSettings() + ) await client.get_cmek_settings(request) # Establish that the underlying gRPC stub method was called. @@ -9922,16 +10668,19 @@ async def test_get_cmek_settings_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - logging_config.UpdateCmekSettingsRequest, - dict, -]) -def test_update_cmek_settings(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateCmekSettingsRequest, + dict, + ], +) +def test_update_cmek_settings(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9943,14 +10692,14 @@ def test_update_cmek_settings(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_cmek_settings), - '__call__') as call: + type(client.transport.update_cmek_settings), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = logging_config.CmekSettings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_key_version_name='kms_key_version_name_value', - service_account_id='service_account_id_value', + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", ) response = client.update_cmek_settings(request) @@ -9962,10 +10711,10 @@ def test_update_cmek_settings(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.CmekSettings) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.kms_key_version_name == 'kms_key_version_name_value' - assert response.service_account_id == 'service_account_id_value' + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_key_version_name == "kms_key_version_name_value" + assert response.service_account_id == "service_account_id_value" def test_update_cmek_settings_non_empty_request_with_auto_populated_field(): @@ -9973,28 +10722,31 @@ def test_update_cmek_settings_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
request = logging_config.UpdateCmekSettingsRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_cmek_settings), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.update_cmek_settings), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_cmek_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateCmekSettingsRequest( - name='name_value', + name="name_value", ) + def test_update_cmek_settings_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -10009,12 +10761,18 @@ def test_update_cmek_settings_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_cmek_settings in client._transport._wrapped_methods + assert ( + client._transport.update_cmek_settings in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_cmek_settings] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.update_cmek_settings] = ( + mock_rpc + ) request = {} client.update_cmek_settings(request) @@ -10027,8 +10785,11 @@ def test_update_cmek_settings_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_cmek_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_update_cmek_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -10042,12 +10803,17 @@ async def test_update_cmek_settings_async_use_cached_wrapped_rpc(transport: str wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.update_cmek_settings in client._client._transport._wrapped_methods + assert ( + client._client._transport.update_cmek_settings + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_cmek_settings] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.update_cmek_settings + ] = mock_rpc request = {} await client.update_cmek_settings(request) @@ -10061,8 +10827,12 @@ async def test_update_cmek_settings_async_use_cached_wrapped_rpc(transport: str assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateCmekSettingsRequest): +async def test_update_cmek_settings_async( + transport: str = "grpc_asyncio", + request_type=logging_config.UpdateCmekSettingsRequest, +): client = ConfigServiceV2AsyncClient( 
credentials=async_anonymous_credentials(), transport=transport, @@ -10074,15 +10844,17 @@ async def test_update_cmek_settings_async(transport: str = 'grpc_asyncio', reque # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_cmek_settings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_key_version_name='kms_key_version_name_value', - service_account_id='service_account_id_value', - )) + type(client.transport.update_cmek_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.CmekSettings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", + ) + ) response = await client.update_cmek_settings(request) # Establish that the underlying gRPC stub method was called. @@ -10093,16 +10865,17 @@ async def test_update_cmek_settings_async(transport: str = 'grpc_asyncio', reque # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.CmekSettings) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.kms_key_version_name == 'kms_key_version_name_value' - assert response.service_account_id == 'service_account_id_value' + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_key_version_name == "kms_key_version_name_value" + assert response.service_account_id == "service_account_id_value" @pytest.mark.asyncio async def test_update_cmek_settings_async_from_dict(): await test_update_cmek_settings_async(request_type=dict) + def test_update_cmek_settings_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -10112,12 +10885,12 @@ def test_update_cmek_settings_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateCmekSettingsRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_cmek_settings), - '__call__') as call: + type(client.transport.update_cmek_settings), "__call__" + ) as call: call.return_value = logging_config.CmekSettings() client.update_cmek_settings(request) @@ -10129,9 +10902,9 @@ def test_update_cmek_settings_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -10144,13 +10917,15 @@ async def test_update_cmek_settings_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateCmekSettingsRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_cmek_settings), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings()) + type(client.transport.update_cmek_settings), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.CmekSettings() + ) await client.update_cmek_settings(request) # Establish that the underlying gRPC stub method was called. @@ -10161,16 +10936,19 @@ async def test_update_cmek_settings_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - logging_config.GetSettingsRequest, - dict, -]) -def test_get_settings(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetSettingsRequest, + dict, + ], +) +def test_get_settings(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10181,15 +10959,13 @@ def test_get_settings(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_settings), - '__call__') as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = logging_config.Settings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_service_account_id='kms_service_account_id_value', - storage_location='storage_location_value', + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", disable_default_sink=True, ) response = client.get_settings(request) @@ -10202,10 +10978,10 @@ def test_get_settings(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.Settings) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.kms_service_account_id == 'kms_service_account_id_value' - assert response.storage_location == 'storage_location_value' + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_service_account_id == "kms_service_account_id_value" + assert response.storage_location == "storage_location_value" assert response.disable_default_sink is True @@ -10214,28 +10990,29 @@ def test_get_settings_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.GetSettingsRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_settings), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetSettingsRequest( - name='name_value', + name="name_value", ) + def test_get_settings_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -10254,7 +11031,9 @@ def test_get_settings_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.get_settings] = mock_rpc request = {} client.get_settings(request) @@ -10268,8 +11047,11 @@ def test_get_settings_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_get_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -10283,12 +11065,17 @@ async def test_get_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_ wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_settings in client._client._transport._wrapped_methods + assert ( + client._client._transport.get_settings + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = 
mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_settings] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.get_settings + ] = mock_rpc request = {} await client.get_settings(request) @@ -10302,8 +11089,11 @@ async def test_get_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_ assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetSettingsRequest): +async def test_get_settings_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetSettingsRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -10314,17 +11104,17 @@ async def test_get_settings_async(transport: str = 'grpc_asyncio', request_type= request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_settings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_service_account_id='kms_service_account_id_value', - storage_location='storage_location_value', - disable_default_sink=True, - )) + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", + disable_default_sink=True, + ) + ) response = await client.get_settings(request) # Establish that the underlying gRPC stub method was called. 
@@ -10335,10 +11125,10 @@ async def test_get_settings_async(transport: str = 'grpc_asyncio', request_type= # Establish that the response is the type that we expect. assert isinstance(response, logging_config.Settings) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.kms_service_account_id == 'kms_service_account_id_value' - assert response.storage_location == 'storage_location_value' + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_service_account_id == "kms_service_account_id_value" + assert response.storage_location == "storage_location_value" assert response.disable_default_sink is True @@ -10346,6 +11136,7 @@ async def test_get_settings_async(transport: str = 'grpc_asyncio', request_type= async def test_get_settings_async_from_dict(): await test_get_settings_async(request_type=dict) + def test_get_settings_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -10355,12 +11146,10 @@ def test_get_settings_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetSettingsRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_settings), - '__call__') as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: call.return_value = logging_config.Settings() client.get_settings(request) @@ -10372,9 +11161,9 @@ def test_get_settings_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -10387,13 +11176,13 @@ async def test_get_settings_field_headers_async(): # a field header. 
Set these to a non-empty value. request = logging_config.GetSettingsRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_settings), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings()) + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings() + ) await client.get_settings(request) # Establish that the underlying gRPC stub method was called. @@ -10404,9 +11193,9 @@ async def test_get_settings_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_get_settings_flattened(): @@ -10415,15 +11204,13 @@ def test_get_settings_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_settings), - '__call__') as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.Settings() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.get_settings( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -10431,7 +11218,7 @@ def test_get_settings_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -10445,9 +11232,10 @@ def test_get_settings_flattened_error(): with pytest.raises(ValueError): client.get_settings( logging_config.GetSettingsRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_get_settings_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -10455,17 +11243,17 @@ async def test_get_settings_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_settings), - '__call__') as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.Settings() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.get_settings( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -10473,9 +11261,10 @@ async def test_get_settings_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_get_settings_flattened_error_async(): client = ConfigServiceV2AsyncClient( @@ -10487,15 +11276,18 @@ async def test_get_settings_flattened_error_async(): with pytest.raises(ValueError): await client.get_settings( logging_config.GetSettingsRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - logging_config.UpdateSettingsRequest, - dict, -]) -def test_update_settings(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateSettingsRequest, + dict, + ], +) +def test_update_settings(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10506,15 +11298,13 @@ def test_update_settings(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_settings), - '__call__') as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = logging_config.Settings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_service_account_id='kms_service_account_id_value', - storage_location='storage_location_value', + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", disable_default_sink=True, ) response = client.update_settings(request) @@ -10527,10 +11317,10 @@ def test_update_settings(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.Settings) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.kms_service_account_id == 'kms_service_account_id_value' - assert response.storage_location == 'storage_location_value' + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_service_account_id == "kms_service_account_id_value" + assert response.storage_location == "storage_location_value" assert response.disable_default_sink is True @@ -10539,28 +11329,29 @@ def test_update_settings_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.UpdateSettingsRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_settings), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateSettingsRequest( - name='name_value', + name="name_value", ) + def test_update_settings_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -10579,7 +11370,9 @@ def test_update_settings_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.update_settings] = mock_rpc request = {} client.update_settings(request) @@ -10593,8 +11386,11 @@ def test_update_settings_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_update_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -10608,12 +11404,17 @@ async def test_update_settings_async_use_cached_wrapped_rpc(transport: str = "gr wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.update_settings in client._client._transport._wrapped_methods + assert ( + client._client._transport.update_settings + in client._client._transport._wrapped_methods + ) # Replace cached 
wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_settings] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.update_settings + ] = mock_rpc request = {} await client.update_settings(request) @@ -10627,8 +11428,11 @@ async def test_update_settings_async_use_cached_wrapped_rpc(transport: str = "gr assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateSettingsRequest): +async def test_update_settings_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateSettingsRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -10639,17 +11443,17 @@ async def test_update_settings_async(transport: str = 'grpc_asyncio', request_ty request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_settings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_service_account_id='kms_service_account_id_value', - storage_location='storage_location_value', - disable_default_sink=True, - )) + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", + disable_default_sink=True, + ) + ) response = await client.update_settings(request) # Establish that the underlying gRPC stub method was called. @@ -10660,10 +11464,10 @@ async def test_update_settings_async(transport: str = 'grpc_asyncio', request_ty # Establish that the response is the type that we expect. assert isinstance(response, logging_config.Settings) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.kms_service_account_id == 'kms_service_account_id_value' - assert response.storage_location == 'storage_location_value' + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_service_account_id == "kms_service_account_id_value" + assert response.storage_location == "storage_location_value" assert response.disable_default_sink is True @@ -10671,6 +11475,7 @@ async def test_update_settings_async(transport: str = 'grpc_asyncio', request_ty async def test_update_settings_async_from_dict(): await test_update_settings_async(request_type=dict) + def test_update_settings_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -10680,12 +11485,10 @@ def test_update_settings_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateSettingsRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_settings), - '__call__') as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: call.return_value = logging_config.Settings() client.update_settings(request) @@ -10697,9 +11500,9 @@ def test_update_settings_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -10712,13 +11515,13 @@ async def test_update_settings_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateSettingsRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_settings), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings()) + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings() + ) await client.update_settings(request) # Establish that the underlying gRPC stub method was called. @@ -10729,9 +11532,9 @@ async def test_update_settings_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_update_settings_flattened(): @@ -10740,16 +11543,14 @@ def test_update_settings_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_settings), - '__call__') as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.Settings() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_settings( - settings=logging_config.Settings(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + settings=logging_config.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -10757,10 +11558,10 @@ def test_update_settings_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].settings - mock_val = logging_config.Settings(name='name_value') + mock_val = logging_config.Settings(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @@ -10774,10 +11575,11 @@ def test_update_settings_flattened_error(): with pytest.raises(ValueError): client.update_settings( logging_config.UpdateSettingsRequest(), - settings=logging_config.Settings(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + settings=logging_config.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + @pytest.mark.asyncio async def test_update_settings_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -10785,18 +11587,18 @@ async def test_update_settings_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_settings), - '__call__') as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.Settings() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_settings( - settings=logging_config.Settings(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + settings=logging_config.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -10804,12 +11606,13 @@ async def test_update_settings_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].settings - mock_val = logging_config.Settings(name='name_value') + mock_val = logging_config.Settings(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val + @pytest.mark.asyncio async def test_update_settings_flattened_error_async(): client = ConfigServiceV2AsyncClient( @@ -10821,16 +11624,19 @@ async def test_update_settings_flattened_error_async(): with pytest.raises(ValueError): await client.update_settings( logging_config.UpdateSettingsRequest(), - settings=logging_config.Settings(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + settings=logging_config.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -@pytest.mark.parametrize("request_type", [ - logging_config.CopyLogEntriesRequest, - 
dict, -]) -def test_copy_log_entries(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.CopyLogEntriesRequest, + dict, + ], +) +def test_copy_log_entries(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10841,11 +11647,9 @@ def test_copy_log_entries(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.copy_log_entries), - '__call__') as call: + with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.copy_log_entries(request) # Establish that the underlying gRPC stub method was called. @@ -10863,32 +11667,33 @@ def test_copy_log_entries_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.CopyLogEntriesRequest( - name='name_value', - filter='filter_value', - destination='destination_value', + name="name_value", + filter="filter_value", + destination="destination_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.copy_log_entries), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.copy_log_entries(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CopyLogEntriesRequest( - name='name_value', - filter='filter_value', - destination='destination_value', + name="name_value", + filter="filter_value", + destination="destination_value", ) + def test_copy_log_entries_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -10907,8 +11712,12 @@ def test_copy_log_entries_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.copy_log_entries] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.copy_log_entries] = ( + mock_rpc + ) request = {} client.copy_log_entries(request) @@ -10926,8 +11735,11 @@ def test_copy_log_entries_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_copy_log_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_copy_log_entries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -10941,12 +11753,17 @@ async def test_copy_log_entries_async_use_cached_wrapped_rpc(transport: str = "g wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.copy_log_entries in client._client._transport._wrapped_methods + assert ( + client._client._transport.copy_log_entries + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.copy_log_entries] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.copy_log_entries + ] = mock_rpc request = {} await client.copy_log_entries(request) @@ -10965,8 +11782,11 @@ async def test_copy_log_entries_async_use_cached_wrapped_rpc(transport: str = "g assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_copy_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging_config.CopyLogEntriesRequest): +async def test_copy_log_entries_async( + transport: str = "grpc_asyncio", request_type=logging_config.CopyLogEntriesRequest +): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ 
-10977,12 +11797,10 @@ async def test_copy_log_entries_async(transport: str = 'grpc_asyncio', request_t request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.copy_log_entries), - '__call__') as call: + with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.copy_log_entries(request) @@ -11039,8 +11857,7 @@ def test_credentials_transport_error(): options.api_key = "api_key" with pytest.raises(ValueError): client = ConfigServiceV2Client( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() + client_options=options, credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. @@ -11062,6 +11879,7 @@ def test_transport_instance(): client = ConfigServiceV2Client(transport=transport) assert client.transport is transport + def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.ConfigServiceV2GrpcTransport( @@ -11076,17 +11894,22 @@ def test_transport_get_channel(): channel = transport.grpc_channel assert channel -@pytest.mark.parametrize("transport_class", [ - transports.ConfigServiceV2GrpcTransport, - transports.ConfigServiceV2GrpcAsyncIOTransport, -]) + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, + ], +) def test_transport_adc(transport_class): # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() + def test_transport_kind_grpc(): transport = ConfigServiceV2Client.get_transport_class("grpc")( credentials=ga_credentials.AnonymousCredentials() @@ -11096,8 +11919,7 @@ def test_transport_kind_grpc(): def test_initialize_client_w_grpc(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" ) assert client is not None @@ -11111,9 +11933,7 @@ def test_list_buckets_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: call.return_value = logging_config.ListBucketsResponse() client.list_buckets(request=None) @@ -11134,9 +11954,7 @@ def test_get_bucket_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: call.return_value = logging_config.LogBucket() client.get_bucket(request=None) @@ -11158,9 +11976,9 @@ def test_create_bucket_async_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.create_bucket_async), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + type(client.transport.create_bucket_async), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.create_bucket_async(request=None) # Establish that the underlying stub method was called. 
@@ -11181,9 +11999,9 @@ def test_update_bucket_async_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_bucket_async), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + type(client.transport.update_bucket_async), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.update_bucket_async(request=None) # Establish that the underlying stub method was called. @@ -11203,9 +12021,7 @@ def test_create_bucket_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: call.return_value = logging_config.LogBucket() client.create_bucket(request=None) @@ -11226,9 +12042,7 @@ def test_update_bucket_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: call.return_value = logging_config.LogBucket() client.update_bucket(request=None) @@ -11249,9 +12063,7 @@ def test_delete_bucket_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: call.return_value = None client.delete_bucket(request=None) @@ -11272,9 +12084,7 @@ def test_undelete_bucket_empty_call_grpc(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.undelete_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: call.return_value = None client.undelete_bucket(request=None) @@ -11295,9 +12105,7 @@ def test_list_views_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_views), - '__call__') as call: + with mock.patch.object(type(client.transport.list_views), "__call__") as call: call.return_value = logging_config.ListViewsResponse() client.list_views(request=None) @@ -11318,9 +12126,7 @@ def test_get_view_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_view), - '__call__') as call: + with mock.patch.object(type(client.transport.get_view), "__call__") as call: call.return_value = logging_config.LogView() client.get_view(request=None) @@ -11341,9 +12147,7 @@ def test_create_view_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_view), - '__call__') as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: call.return_value = logging_config.LogView() client.create_view(request=None) @@ -11364,9 +12168,7 @@ def test_update_view_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_view), - '__call__') as call: + with mock.patch.object(type(client.transport.update_view), "__call__") as call: call.return_value = logging_config.LogView() client.update_view(request=None) @@ -11387,9 +12189,7 @@ def test_delete_view_empty_call_grpc(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_view), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: call.return_value = None client.delete_view(request=None) @@ -11410,9 +12210,7 @@ def test_list_sinks_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_sinks), - '__call__') as call: + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: call.return_value = logging_config.ListSinksResponse() client.list_sinks(request=None) @@ -11433,9 +12231,7 @@ def test_get_sink_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: call.return_value = logging_config.LogSink() client.get_sink(request=None) @@ -11456,9 +12252,7 @@ def test_create_sink_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: call.return_value = logging_config.LogSink() client.create_sink(request=None) @@ -11479,9 +12273,7 @@ def test_update_sink_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: call.return_value = logging_config.LogSink() client.update_sink(request=None) @@ -11502,9 +12294,7 @@ def test_delete_sink_empty_call_grpc(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: call.return_value = None client.delete_sink(request=None) @@ -11525,10 +12315,8 @@ def test_create_link_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_link), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.create_link), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.create_link(request=None) # Establish that the underlying stub method was called. @@ -11548,10 +12336,8 @@ def test_delete_link_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_link), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.delete_link(request=None) # Establish that the underlying stub method was called. @@ -11571,9 +12357,7 @@ def test_list_links_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_links), - '__call__') as call: + with mock.patch.object(type(client.transport.list_links), "__call__") as call: call.return_value = logging_config.ListLinksResponse() client.list_links(request=None) @@ -11594,9 +12378,7 @@ def test_get_link_empty_call_grpc(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.get_link), - '__call__') as call: + with mock.patch.object(type(client.transport.get_link), "__call__") as call: call.return_value = logging_config.Link() client.get_link(request=None) @@ -11617,9 +12399,7 @@ def test_list_exclusions_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: call.return_value = logging_config.ListExclusionsResponse() client.list_exclusions(request=None) @@ -11640,9 +12420,7 @@ def test_get_exclusion_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: call.return_value = logging_config.LogExclusion() client.get_exclusion(request=None) @@ -11663,9 +12441,7 @@ def test_create_exclusion_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: call.return_value = logging_config.LogExclusion() client.create_exclusion(request=None) @@ -11686,9 +12462,7 @@ def test_update_exclusion_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: call.return_value = logging_config.LogExclusion() client.update_exclusion(request=None) @@ -11709,9 +12483,7 @@ def test_delete_exclusion_empty_call_grpc(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: call.return_value = None client.delete_exclusion(request=None) @@ -11733,8 +12505,8 @@ def test_get_cmek_settings_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_cmek_settings), - '__call__') as call: + type(client.transport.get_cmek_settings), "__call__" + ) as call: call.return_value = logging_config.CmekSettings() client.get_cmek_settings(request=None) @@ -11756,8 +12528,8 @@ def test_update_cmek_settings_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_cmek_settings), - '__call__') as call: + type(client.transport.update_cmek_settings), "__call__" + ) as call: call.return_value = logging_config.CmekSettings() client.update_cmek_settings(request=None) @@ -11778,9 +12550,7 @@ def test_get_settings_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_settings), - '__call__') as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: call.return_value = logging_config.Settings() client.get_settings(request=None) @@ -11801,9 +12571,7 @@ def test_update_settings_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_settings), - '__call__') as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: call.return_value = logging_config.Settings() client.update_settings(request=None) @@ -11824,10 +12592,8 @@ def test_copy_log_entries_empty_call_grpc(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.copy_log_entries), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.copy_log_entries(request=None) # Establish that the underlying stub method was called. @@ -11847,8 +12613,7 @@ def test_transport_kind_grpc_asyncio(): def test_initialize_client_w_grpc_asyncio(): client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" + credentials=async_anonymous_credentials(), transport="grpc_asyncio" ) assert client is not None @@ -11863,13 +12628,13 @@ async def test_list_buckets_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListBucketsResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListBucketsResponse( + next_page_token="next_page_token_value", + ) + ) await client.list_buckets(request=None) # Establish that the underlying stub method was called. @@ -11890,19 +12655,19 @@ async def test_get_bucket_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( - name='name_value', - description='description_value', - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - analytics_enabled=True, - restricted_fields=['restricted_fields_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=["restricted_fields_value"], + ) + ) await client.get_bucket(request=None) # Establish that the underlying stub method was called. @@ -11924,11 +12689,11 @@ async def test_create_bucket_async_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.create_bucket_async), - '__call__') as call: + type(client.transport.create_bucket_async), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.create_bucket_async(request=None) @@ -11951,11 +12716,11 @@ async def test_update_bucket_async_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_bucket_async), - '__call__') as call: + type(client.transport.update_bucket_async), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.update_bucket_async(request=None) @@ -11976,20 +12741,20 @@ async def test_create_bucket_empty_call_grpc_asyncio(): transport="grpc_asyncio", ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_bucket), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( - name='name_value', - description='description_value', - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - analytics_enabled=True, - restricted_fields=['restricted_fields_value'], - )) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=["restricted_fields_value"], + ) + ) await client.create_bucket(request=None) # Establish that the underlying stub method was called. @@ -12010,19 +12775,19 @@ async def test_update_bucket_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( - name='name_value', - description='description_value', - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - analytics_enabled=True, - restricted_fields=['restricted_fields_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=["restricted_fields_value"], + ) + ) await client.update_bucket(request=None) # Establish that the underlying stub method was called. @@ -12043,9 +12808,7 @@ async def test_delete_bucket_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_bucket(request=None) @@ -12068,9 +12831,7 @@ async def test_undelete_bucket_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.undelete_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.undelete_bucket(request=None) @@ -12093,13 +12854,13 @@ async def test_list_views_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_views), - '__call__') as call: + with mock.patch.object(type(client.transport.list_views), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListViewsResponse( + next_page_token="next_page_token_value", + ) + ) await client.list_views(request=None) # Establish that the underlying stub method was called. @@ -12120,15 +12881,15 @@ async def test_get_view_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_view), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', - )) + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", + ) + ) await client.get_view(request=None) # Establish that the underlying stub method was called. @@ -12149,15 +12910,15 @@ async def test_create_view_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_view), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', - )) + with mock.patch.object(type(client.transport.create_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", + ) + ) await client.create_view(request=None) # Establish that the underlying stub method was called. @@ -12178,15 +12939,15 @@ async def test_update_view_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_view), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', - )) + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", + ) + ) await client.update_view(request=None) # Establish that the underlying stub method was called. @@ -12207,9 +12968,7 @@ async def test_delete_view_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_view), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_view(request=None) @@ -12232,13 +12991,13 @@ async def test_list_sinks_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_sinks), - '__call__') as call: + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListSinksResponse( + next_page_token="next_page_token_value", + ) + ) await client.list_sinks(request=None) # Establish that the underlying stub method was called. @@ -12259,20 +13018,20 @@ async def test_get_sink_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_sink), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', - include_children=True, - )) + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) + ) await client.get_sink(request=None) # Establish that the underlying stub method was called. @@ -12293,20 +13052,20 @@ async def test_create_sink_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_sink), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', - include_children=True, - )) + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) + ) await client.create_sink(request=None) # Establish that the underlying stub method was called. @@ -12327,20 +13086,20 @@ async def test_update_sink_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_sink), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', - include_children=True, - )) + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) + ) await client.update_sink(request=None) # Establish that the underlying stub method was called. @@ -12361,9 +13120,7 @@ async def test_delete_sink_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_sink(request=None) @@ -12386,12 +13143,10 @@ async def test_create_link_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_link), - '__call__') as call: + with mock.patch.object(type(client.transport.create_link), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.create_link(request=None) @@ -12413,12 +13168,10 @@ async def test_delete_link_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_link), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.delete_link(request=None) @@ -12440,13 +13193,13 @@ async def test_list_links_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_links), - '__call__') as call: + with mock.patch.object(type(client.transport.list_links), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListLinksResponse( + next_page_token="next_page_token_value", + ) + ) await client.list_links(request=None) # Establish that the underlying stub method was called. @@ -12467,15 +13220,15 @@ async def test_get_link_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_link), - '__call__') as call: + with mock.patch.object(type(client.transport.get_link), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link( - name='name_value', - description='description_value', - lifecycle_state=logging_config.LifecycleState.ACTIVE, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Link( + name="name_value", + description="description_value", + lifecycle_state=logging_config.LifecycleState.ACTIVE, + ) + ) await client.get_link(request=None) # Establish that the underlying stub method was called. @@ -12496,13 +13249,13 @@ async def test_list_exclusions_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListExclusionsResponse( + next_page_token="next_page_token_value", + ) + ) await client.list_exclusions(request=None) # Establish that the underlying stub method was called. @@ -12523,16 +13276,16 @@ async def test_get_exclusion_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + ) await client.get_exclusion(request=None) # Establish that the underlying stub method was called. @@ -12553,16 +13306,16 @@ async def test_create_exclusion_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + ) await client.create_exclusion(request=None) # Establish that the underlying stub method was called. @@ -12583,16 +13336,16 @@ async def test_update_exclusion_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + ) await client.update_exclusion(request=None) # Establish that the underlying stub method was called. @@ -12613,9 +13366,7 @@ async def test_delete_exclusion_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_exclusion(request=None) @@ -12639,15 +13390,17 @@ async def test_get_cmek_settings_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_cmek_settings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_key_version_name='kms_key_version_name_value', - service_account_id='service_account_id_value', - )) + type(client.transport.get_cmek_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.CmekSettings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", + ) + ) await client.get_cmek_settings(request=None) # Establish that the underlying stub method was called. 
@@ -12669,15 +13422,17 @@ async def test_update_cmek_settings_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_cmek_settings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_key_version_name='kms_key_version_name_value', - service_account_id='service_account_id_value', - )) + type(client.transport.update_cmek_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.CmekSettings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", + ) + ) await client.update_cmek_settings(request=None) # Establish that the underlying stub method was called. @@ -12698,17 +13453,17 @@ async def test_get_settings_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_settings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_service_account_id='kms_service_account_id_value', - storage_location='storage_location_value', - disable_default_sink=True, - )) + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", + disable_default_sink=True, + ) + ) await client.get_settings(request=None) # Establish that the underlying stub method was called. @@ -12729,17 +13484,17 @@ async def test_update_settings_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_settings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_service_account_id='kms_service_account_id_value', - storage_location='storage_location_value', - disable_default_sink=True, - )) + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", + disable_default_sink=True, + ) + ) await client.update_settings(request=None) # Establish that the underlying stub method was called. @@ -12760,12 +13515,10 @@ async def test_copy_log_entries_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.copy_log_entries), - '__call__') as call: + with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.copy_log_entries(request=None) @@ -12787,18 +13540,21 @@ def test_transport_grpc_default(): transports.ConfigServiceV2GrpcTransport, ) + def test_config_service_v2_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.ConfigServiceV2Transport( credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" + credentials_file="credentials.json", ) def test_config_service_v2_base_transport(): # Instantiate the base transport. - with mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport.__init__') as Transport: + with mock.patch( + "google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport.__init__" + ) as Transport: Transport.return_value = None transport = transports.ConfigServiceV2Transport( credentials=ga_credentials.AnonymousCredentials(), @@ -12807,41 +13563,41 @@ def test_config_service_v2_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. 
methods = ( - 'list_buckets', - 'get_bucket', - 'create_bucket_async', - 'update_bucket_async', - 'create_bucket', - 'update_bucket', - 'delete_bucket', - 'undelete_bucket', - 'list_views', - 'get_view', - 'create_view', - 'update_view', - 'delete_view', - 'list_sinks', - 'get_sink', - 'create_sink', - 'update_sink', - 'delete_sink', - 'create_link', - 'delete_link', - 'list_links', - 'get_link', - 'list_exclusions', - 'get_exclusion', - 'create_exclusion', - 'update_exclusion', - 'delete_exclusion', - 'get_cmek_settings', - 'update_cmek_settings', - 'get_settings', - 'update_settings', - 'copy_log_entries', - 'get_operation', - 'cancel_operation', - 'list_operations', + "list_buckets", + "get_bucket", + "create_bucket_async", + "update_bucket_async", + "create_bucket", + "update_bucket", + "delete_bucket", + "undelete_bucket", + "list_views", + "get_view", + "create_view", + "update_view", + "delete_view", + "list_sinks", + "get_sink", + "create_sink", + "update_sink", + "delete_sink", + "create_link", + "delete_link", + "list_links", + "get_link", + "list_exclusions", + "get_exclusion", + "create_exclusion", + "update_exclusion", + "delete_exclusion", + "get_cmek_settings", + "update_cmek_settings", + "get_settings", + "update_settings", + "copy_log_entries", + "get_operation", + "cancel_operation", + "list_operations", ) for method in methods: with pytest.raises(NotImplementedError): @@ -12857,7 +13613,7 @@ def test_config_service_v2_base_transport(): # Catch all for all remaining methods and properties remainder = [ - 'kind', + "kind", ] for r in remainder: with pytest.raises(NotImplementedError): @@ -12866,28 +13622,41 @@ def test_config_service_v2_base_transport(): def test_config_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, 
mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages') as Transport: + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch( + "google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages" + ) as Transport, + ): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.ConfigServiceV2Transport( credentials_file="credentials.json", quota_project_id="octopus", ) - load_creds.assert_called_once_with("credentials.json", + load_creds.assert_called_once_with( + "credentials.json", scopes=None, default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', -), + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), quota_project_id="octopus", ) def test_config_service_v2_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages') as Transport: + with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch( + "google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages" + ) as Transport, + ): Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.ConfigServiceV2Transport() @@ -12896,17 +13665,17 @@ def test_config_service_v2_base_transport_with_adc(): def test_config_service_v2_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) ConfigServiceV2Client() adc.assert_called_once_with( scopes=None, default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', -), + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), quota_project_id=None, ) @@ -12921,12 +13690,17 @@ def test_config_service_v2_auth_adc(): def test_config_service_v2_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read',), + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), quota_project_id="octopus", ) @@ -12939,39 +13713,39 @@ def test_config_service_v2_transport_auth_adc(transport_class): ], ) def test_config_service_v2_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) adc.return_value = (gdch_mock, None) transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) + gdch_mock.with_gdch_audience.assert_called_once_with(e) @pytest.mark.parametrize( "transport_class,grpc_helpers", [ (transports.ConfigServiceV2GrpcTransport, grpc_helpers), - 
(transports.ConfigServiceV2GrpcAsyncIOTransport, grpc_helpers_async) + (transports.ConfigServiceV2GrpcAsyncIOTransport, grpc_helpers_async), ], ) def test_config_service_v2_transport_create_channel(transport_class, grpc_helpers): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: + with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel, + ): creds = ga_credentials.AnonymousCredentials() adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) create_channel.assert_called_with( "logging.googleapis.com:443", @@ -12979,11 +13753,11 @@ def test_config_service_v2_transport_create_channel(transport_class, grpc_helper credentials_file=None, quota_project_id="octopus", default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', -), + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), scopes=["1", "2"], default_host="logging.googleapis.com", ssl_credentials=None, @@ -12994,10 +13768,14 @@ def test_config_service_v2_transport_create_channel(transport_class, grpc_helper ) -@pytest.mark.parametrize("transport_class", [transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport]) -def test_config_service_v2_grpc_transport_client_cert_source_for_mtls( - transport_class -): 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, + ], +) +def test_config_service_v2_grpc_transport_client_cert_source_for_mtls(transport_class): cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. @@ -13006,7 +13784,7 @@ def test_config_service_v2_grpc_transport_client_cert_source_for_mtls( transport_class( host="squid.clam.whelk", credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds + ssl_channel_credentials=mock_ssl_channel_creds, ) mock_create_channel.assert_called_once_with( "squid.clam.whelk:443", @@ -13027,45 +13805,52 @@ def test_config_service_v2_grpc_transport_client_cert_source_for_mtls( with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: transport_class( credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback + client_cert_source_for_mtls=client_cert_source_callback, ) expected_cert, expected_key = client_cert_source_callback() mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key + certificate_chain=expected_cert, private_key=expected_key ) -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) def test_config_service_v2_host_no_port(transport_name): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'logging.googleapis.com:443' + client_options=client_options.ClientOptions( + api_endpoint="logging.googleapis.com" + ), + transport=transport_name, ) + assert client.transport._host == ("logging.googleapis.com:443") + -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) +@pytest.mark.parametrize( + 
"transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) def test_config_service_v2_host_with_port(transport_name): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com:8000'), + client_options=client_options.ClientOptions( + api_endpoint="logging.googleapis.com:8000" + ), transport=transport_name, ) - assert client.transport._host == ( - 'logging.googleapis.com:8000' - ) + assert client.transport._host == ("logging.googleapis.com:8000") + def test_config_service_v2_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.ConfigServiceV2GrpcTransport( @@ -13078,7 +13863,7 @@ def test_config_service_v2_grpc_transport_channel(): def test_config_service_v2_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.ConfigServiceV2GrpcAsyncIOTransport( @@ -13093,12 +13878,22 @@ def test_config_service_v2_grpc_asyncio_transport_channel(): # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.filterwarnings("ignore::FutureWarning") -@pytest.mark.parametrize("transport_class", [transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport]) +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, + ], +) def test_config_service_v2_transport_channel_mtls_with_client_cert_source( - transport_class + transport_class, ): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -13107,7 +13902,7 @@ def test_config_service_v2_transport_channel_mtls_with_client_cert_source( cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -13137,17 +13932,23 @@ def test_config_service_v2_transport_channel_mtls_with_client_cert_source( # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport]) -def test_config_service_v2_transport_channel_mtls_with_adc( - transport_class -): +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, + ], +) +def test_config_service_v2_transport_channel_mtls_with_adc(transport_class): mock_ssl_cred = mock.Mock() with mock.patch.multiple( "google.auth.transport.grpc.SslCredentials", __init__=mock.Mock(return_value=None), ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel mock_cred = mock.Mock() @@ -13178,7 +13979,7 @@ def test_config_service_v2_transport_channel_mtls_with_adc( def test_config_service_v2_grpc_lro_client(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) transport = client.transport @@ -13195,7 +13996,7 @@ def test_config_service_v2_grpc_lro_client(): def test_config_service_v2_grpc_lro_async_client(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', + transport="grpc_asyncio", ) transport = client.transport @@ -13211,7 +14012,9 @@ def test_config_service_v2_grpc_lro_async_client(): def test_cmek_settings_path(): project = "squid" - expected = "projects/{project}/cmekSettings".format(project=project, ) + expected = "projects/{project}/cmekSettings".format( + project=project, + ) actual = ConfigServiceV2Client.cmek_settings_path(project) assert expected == actual @@ -13226,12 +14029,20 @@ def test_parse_cmek_settings_path(): actual = 
ConfigServiceV2Client.parse_cmek_settings_path(path) assert expected == actual + def test_link_path(): project = "whelk" location = "octopus" bucket = "oyster" link = "nudibranch" - expected = "projects/{project}/locations/{location}/buckets/{bucket}/links/{link}".format(project=project, location=location, bucket=bucket, link=link, ) + expected = ( + "projects/{project}/locations/{location}/buckets/{bucket}/links/{link}".format( + project=project, + location=location, + bucket=bucket, + link=link, + ) + ) actual = ConfigServiceV2Client.link_path(project, location, bucket, link) assert expected == actual @@ -13249,11 +14060,16 @@ def test_parse_link_path(): actual = ConfigServiceV2Client.parse_link_path(path) assert expected == actual + def test_log_bucket_path(): project = "scallop" location = "abalone" bucket = "squid" - expected = "projects/{project}/locations/{location}/buckets/{bucket}".format(project=project, location=location, bucket=bucket, ) + expected = "projects/{project}/locations/{location}/buckets/{bucket}".format( + project=project, + location=location, + bucket=bucket, + ) actual = ConfigServiceV2Client.log_bucket_path(project, location, bucket) assert expected == actual @@ -13270,10 +14086,14 @@ def test_parse_log_bucket_path(): actual = ConfigServiceV2Client.parse_log_bucket_path(path) assert expected == actual + def test_log_exclusion_path(): project = "oyster" exclusion = "nudibranch" - expected = "projects/{project}/exclusions/{exclusion}".format(project=project, exclusion=exclusion, ) + expected = "projects/{project}/exclusions/{exclusion}".format( + project=project, + exclusion=exclusion, + ) actual = ConfigServiceV2Client.log_exclusion_path(project, exclusion) assert expected == actual @@ -13289,10 +14109,14 @@ def test_parse_log_exclusion_path(): actual = ConfigServiceV2Client.parse_log_exclusion_path(path) assert expected == actual + def test_log_sink_path(): project = "winkle" sink = "nautilus" - expected = 
"projects/{project}/sinks/{sink}".format(project=project, sink=sink, ) + expected = "projects/{project}/sinks/{sink}".format( + project=project, + sink=sink, + ) actual = ConfigServiceV2Client.log_sink_path(project, sink) assert expected == actual @@ -13308,12 +14132,20 @@ def test_parse_log_sink_path(): actual = ConfigServiceV2Client.parse_log_sink_path(path) assert expected == actual + def test_log_view_path(): project = "squid" location = "clam" bucket = "whelk" view = "octopus" - expected = "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format(project=project, location=location, bucket=bucket, view=view, ) + expected = ( + "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format( + project=project, + location=location, + bucket=bucket, + view=view, + ) + ) actual = ConfigServiceV2Client.log_view_path(project, location, bucket, view) assert expected == actual @@ -13331,9 +14163,12 @@ def test_parse_log_view_path(): actual = ConfigServiceV2Client.parse_log_view_path(path) assert expected == actual + def test_settings_path(): project = "winkle" - expected = "projects/{project}/settings".format(project=project, ) + expected = "projects/{project}/settings".format( + project=project, + ) actual = ConfigServiceV2Client.settings_path(project) assert expected == actual @@ -13348,9 +14183,12 @@ def test_parse_settings_path(): actual = ConfigServiceV2Client.parse_settings_path(path) assert expected == actual + def test_common_billing_account_path(): billing_account = "scallop" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) actual = ConfigServiceV2Client.common_billing_account_path(billing_account) assert expected == actual @@ -13365,9 +14203,12 @@ def test_parse_common_billing_account_path(): actual = ConfigServiceV2Client.parse_common_billing_account_path(path) assert expected == actual 
+ def test_common_folder_path(): folder = "squid" - expected = "folders/{folder}".format(folder=folder, ) + expected = "folders/{folder}".format( + folder=folder, + ) actual = ConfigServiceV2Client.common_folder_path(folder) assert expected == actual @@ -13382,9 +14223,12 @@ def test_parse_common_folder_path(): actual = ConfigServiceV2Client.parse_common_folder_path(path) assert expected == actual + def test_common_organization_path(): organization = "whelk" - expected = "organizations/{organization}".format(organization=organization, ) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = ConfigServiceV2Client.common_organization_path(organization) assert expected == actual @@ -13399,9 +14243,12 @@ def test_parse_common_organization_path(): actual = ConfigServiceV2Client.parse_common_organization_path(path) assert expected == actual + def test_common_project_path(): project = "oyster" - expected = "projects/{project}".format(project=project, ) + expected = "projects/{project}".format( + project=project, + ) actual = ConfigServiceV2Client.common_project_path(project) assert expected == actual @@ -13416,10 +14263,14 @@ def test_parse_common_project_path(): actual = ConfigServiceV2Client.parse_common_project_path(path) assert expected == actual + def test_common_location_path(): project = "cuttlefish" location = "mussel" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) actual = ConfigServiceV2Client.common_location_path(project, location) assert expected == actual @@ -13439,14 +14290,18 @@ def test_parse_common_location_path(): def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() - with mock.patch.object(transports.ConfigServiceV2Transport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + 
transports.ConfigServiceV2Transport, "_prep_wrapped_messages" + ) as prep: client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) - with mock.patch.object(transports.ConfigServiceV2Transport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.ConfigServiceV2Transport, "_prep_wrapped_messages" + ) as prep: transport_class = ConfigServiceV2Client.get_transport_class() transport = transport_class( credentials=ga_credentials.AnonymousCredentials(), @@ -13457,7 +14312,8 @@ def test_client_with_default_client_info(): def test_cancel_operation(transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13477,10 +14333,12 @@ def test_cancel_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. assert response is None + @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13490,9 +14348,7 @@ async def test_cancel_operation_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -13515,7 +14371,7 @@ def test_cancel_operation_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None + call.return_value = None client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. @@ -13525,7 +14381,11 @@ def test_cancel_operation_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): @@ -13540,9 +14400,7 @@ async def test_cancel_operation_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -13551,7 +14409,10 @@ async def test_cancel_operation_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_cancel_operation_from_dict(): @@ -13570,6 +14431,7 @@ def test_cancel_operation_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = ConfigServiceV2AsyncClient( @@ -13578,9 +14440,7 @@ async def test_cancel_operation_from_dict_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.cancel_operation( request={ "name": "locations", @@ -13604,6 +14464,7 @@ def test_cancel_operation_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.CancelOperationRequest() + @pytest.mark.asyncio async def test_cancel_operation_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -13612,9 +14473,7 @@ async def test_cancel_operation_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.cancel_operation() # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -13624,7 +14483,8 @@ async def test_cancel_operation_flattened_async(): def test_get_operation(transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13644,10 +14504,12 @@ def test_get_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. 
assert isinstance(response, operations_pb2.Operation) + @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13692,7 +14554,11 @@ def test_get_operation_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_get_operation_field_headers_async(): @@ -13718,7 +14584,10 @@ async def test_get_operation_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_get_operation_from_dict(): @@ -13737,6 +14606,7 @@ def test_get_operation_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = ConfigServiceV2AsyncClient( @@ -13771,6 +14641,7 @@ def test_get_operation_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.GetOperationRequest() + @pytest.mark.asyncio async def test_get_operation_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -13791,7 +14662,8 @@ async def test_get_operation_flattened_async(): def test_list_operations(transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13811,10 +14683,12 @@ def test_list_operations(transport: str = "grpc"): # 
Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.ListOperationsResponse) + @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13859,7 +14733,11 @@ def test_list_operations_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_list_operations_field_headers_async(): @@ -13885,7 +14763,10 @@ async def test_list_operations_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_list_operations_from_dict(): @@ -13904,6 +14785,7 @@ def test_list_operations_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = ConfigServiceV2AsyncClient( @@ -13938,6 +14820,7 @@ def test_list_operations_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.ListOperationsRequest() + @pytest.mark.asyncio async def test_list_operations_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -13958,10 +14841,11 @@ async def test_list_operations_flattened_async(): def test_transport_close_grpc(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") 
as close: + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: with client: close.assert_not_called() close.assert_called_once() @@ -13970,10 +14854,11 @@ def test_transport_close_grpc(): @pytest.mark.asyncio async def test_transport_close_grpc_asyncio(): client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" + credentials=async_anonymous_credentials(), transport="grpc_asyncio" ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: async with client: close.assert_not_called() close.assert_called_once() @@ -13981,12 +14866,11 @@ async def test_transport_close_grpc_asyncio(): def test_client_ctx(): transports = [ - 'grpc', + "grpc", ] for transport in transports: client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. 
with mock.patch.object(type(client.transport), "close") as close: @@ -13995,10 +14879,14 @@ def test_client_ctx(): pass close.assert_called() -@pytest.mark.parametrize("client_class,transport_class", [ - (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport), - (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport), -]) + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport), + (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport), + ], +) def test_api_key_credentials(client_class, transport_class): with mock.patch.object( google.auth._default, "get_api_key_credentials", create=True @@ -14013,7 +14901,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index e03a5e8bb8d2..7052b092f8ac 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -14,6 +14,7 @@ # limitations under the License. 
# import os + # try/except added for compatibility with python < 3.8 try: from unittest import mock @@ -21,39 +22,24 @@ except ImportError: # pragma: NO COVER import mock -import grpc -from grpc.experimental import aio import json import math +from collections.abc import Mapping, Sequence + +import grpc import pytest -from collections.abc import Sequence, Mapping from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule +from grpc.experimental import aio from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule try: from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER +except ImportError: # pragma: NO COVER HAS_GOOGLE_AUTH_AIO = False -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2AsyncClient -from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client -from google.cloud.logging_v2.services.logging_service_v2 import pagers -from google.cloud.logging_v2.services.logging_service_v2 import transports -from google.cloud.logging_v2.types import log_entry -from google.cloud.logging_v2.types import logging -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account import google.api.monitored_resource_pb2 as monitored_resource_pb2 # type: ignore import google.auth import google.logging.type.http_request_pb2 as http_request_pb2 # type: ignore @@ -62,8 +48,26 @@ 
import google.protobuf.duration_pb2 as duration_pb2 # type: ignore import google.protobuf.struct_pb2 as struct_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore - - +from google.api_core import ( + client_options, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + path_template, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.logging_v2.services.logging_service_v2 import ( + LoggingServiceV2AsyncClient, + LoggingServiceV2Client, + pagers, + transports, +) +from google.cloud.logging_v2.types import log_entry, logging +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account CRED_INFO_JSON = { "credential_source": "/path/to/file", @@ -78,9 +82,11 @@ async def mock_async_gen(data, chunk_size=1): chunk = data[i : i + chunk_size] yield chunk.encode("utf-8") + def client_cert_source_callback(): return b"cert bytes", b"key bytes" + # TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. # See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. def async_anonymous_credentials(): @@ -88,17 +94,27 @@ def async_anonymous_credentials(): return ga_credentials_async.AnonymousCredentials() return ga_credentials.AnonymousCredentials() + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. 
def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + # If default endpoint template is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint template so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) def test__get_default_mtls_endpoint(): @@ -110,21 +126,48 @@ def test__get_default_mtls_endpoint(): custom_endpoint = ".custom" assert LoggingServiceV2Client._get_default_mtls_endpoint(None) is None - assert LoggingServiceV2Client._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert LoggingServiceV2Client._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert LoggingServiceV2Client._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert LoggingServiceV2Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert LoggingServiceV2Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi - assert LoggingServiceV2Client._get_default_mtls_endpoint(custom_endpoint) == custom_endpoint + assert ( + LoggingServiceV2Client._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + LoggingServiceV2Client._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + LoggingServiceV2Client._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + LoggingServiceV2Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == 
sandbox_mtls_endpoint + ) + assert ( + LoggingServiceV2Client._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + assert ( + LoggingServiceV2Client._get_default_mtls_endpoint(custom_endpoint) + == custom_endpoint + ) + def test__read_environment_variables(): assert LoggingServiceV2Client._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert LoggingServiceV2Client._read_environment_variables() == (True, "auto", None) + assert LoggingServiceV2Client._read_environment_variables() == ( + True, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert LoggingServiceV2Client._read_environment_variables() == (False, "auto", None) + assert LoggingServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} @@ -138,27 +181,46 @@ def test__read_environment_variables(): ) else: assert LoggingServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert LoggingServiceV2Client._read_environment_variables() == ( False, - "auto", + "never", None, ) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert LoggingServiceV2Client._read_environment_variables() == (False, "never", None) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert LoggingServiceV2Client._read_environment_variables() == (False, "always", None) + assert LoggingServiceV2Client._read_environment_variables() == ( + False, + "always", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert LoggingServiceV2Client._read_environment_variables() == (False, "auto", None) + assert LoggingServiceV2Client._read_environment_variables() == ( + 
False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: LoggingServiceV2Client._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert LoggingServiceV2Client._read_environment_variables() == (False, "auto", "foo.com") + assert LoggingServiceV2Client._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) def test_use_client_cert_effective(): @@ -167,7 +229,9 @@ def test_use_client_cert_effective(): # the google-auth library supports automatic mTLS and determines that a # client certificate should be used. if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=True): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=True + ): assert LoggingServiceV2Client._use_client_cert_effective() is True # Test case 2: Test when `should_use_client_cert` returns False. @@ -175,7 +239,9 @@ def test_use_client_cert_effective(): # the google-auth library supports automatic mTLS and determines that a # client certificate should NOT be used. 
if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=False): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=False + ): assert LoggingServiceV2Client._use_client_cert_effective() is False # Test case 3: Test when `should_use_client_cert` is unavailable and the @@ -187,7 +253,9 @@ def test_use_client_cert_effective(): # Test case 4: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"} + ): assert LoggingServiceV2Client._use_client_cert_effective() is False # Test case 5: Test when `should_use_client_cert` is unavailable and the @@ -199,7 +267,9 @@ def test_use_client_cert_effective(): # Test case 6: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"} + ): assert LoggingServiceV2Client._use_client_cert_effective() is False # Test case 7: Test when `should_use_client_cert` is unavailable and the @@ -211,7 +281,9 @@ def test_use_client_cert_effective(): # Test case 8: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". 
if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"} + ): assert LoggingServiceV2Client._use_client_cert_effective() is False # Test case 9: Test when `should_use_client_cert` is unavailable and the @@ -226,83 +298,167 @@ def test_use_client_cert_effective(): # The method should raise a ValueError as the environment variable must be either # "true" or "false". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): with pytest.raises(ValueError): LoggingServiceV2Client._use_client_cert_effective() # Test case 11: Test when `should_use_client_cert` is available and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. # The method should return False as the environment variable is set to an invalid value. - if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): assert LoggingServiceV2Client._use_client_cert_effective() is False # Test case 12: Test when `should_use_client_cert` is available and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also, # the GOOGLE_API_CONFIG environment variable is unset. 
- if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): assert LoggingServiceV2Client._use_client_cert_effective() is False + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() assert LoggingServiceV2Client._get_client_cert_source(None, False) is None - assert LoggingServiceV2Client._get_client_cert_source(mock_provided_cert_source, False) is None - assert LoggingServiceV2Client._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + assert ( + LoggingServiceV2Client._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + LoggingServiceV2Client._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + LoggingServiceV2Client._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + LoggingServiceV2Client._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert LoggingServiceV2Client._get_client_cert_source(None, True) is mock_default_cert_source - assert LoggingServiceV2Client._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source -@mock.patch.object(LoggingServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", 
modify_default_endpoint_template(LoggingServiceV2Client)) -@mock.patch.object(LoggingServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LoggingServiceV2AsyncClient)) +@mock.patch.object( + LoggingServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2Client), +) +@mock.patch.object( + LoggingServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2AsyncClient), +) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() default_universe = LoggingServiceV2Client._DEFAULT_UNIVERSE - default_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) - assert LoggingServiceV2Client._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert LoggingServiceV2Client._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT - assert LoggingServiceV2Client._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert LoggingServiceV2Client._get_api_endpoint(None, None, default_universe, "always") == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT - assert LoggingServiceV2Client._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT - assert LoggingServiceV2Client._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert LoggingServiceV2Client._get_api_endpoint(None, None, default_universe, 
"never") == default_endpoint + assert ( + LoggingServiceV2Client._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + LoggingServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + LoggingServiceV2Client._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + LoggingServiceV2Client._get_api_endpoint(None, None, default_universe, "always") + == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + LoggingServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + LoggingServiceV2Client._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + LoggingServiceV2Client._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) with pytest.raises(MutualTLSChannelError) as excinfo: - LoggingServiceV2Client._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + LoggingServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) def test__get_universe_domain(): client_universe_domain = "foo.com" universe_domain_env = "bar.com" - assert LoggingServiceV2Client._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert LoggingServiceV2Client._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert LoggingServiceV2Client._get_universe_domain(None, None) == LoggingServiceV2Client._DEFAULT_UNIVERSE + assert ( + LoggingServiceV2Client._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + LoggingServiceV2Client._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + LoggingServiceV2Client._get_universe_domain(None, None) + == LoggingServiceV2Client._DEFAULT_UNIVERSE + ) with pytest.raises(ValueError) as excinfo: LoggingServiceV2Client._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): cred = mock.Mock(["get_cred_info"]) cred.get_cred_info = mock.Mock(return_value=cred_info_json) @@ -318,7 +474,8 @@ def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_in else: assert error.details == ["foo"] -@pytest.mark.parametrize("error_code", [401,403,404,500]) + +@pytest.mark.parametrize("error_code", [401, 
403, 404, 500]) def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): cred = mock.Mock([]) assert not hasattr(cred, "get_cred_info") @@ -331,59 +488,83 @@ def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): client._add_cred_info_for_auth_errors(error) assert error.details == [] -@pytest.mark.parametrize("client_class,transport_name", [ - (LoggingServiceV2Client, "grpc"), - (LoggingServiceV2AsyncClient, "grpc_asyncio"), -]) -def test_logging_service_v2_client_from_service_account_info(client_class, transport_name): + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (LoggingServiceV2Client, "grpc"), + (LoggingServiceV2AsyncClient, "grpc_asyncio"), + ], +) +def test_logging_service_v2_client_from_service_account_info( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: factory.return_value = creds info = {"valid": True} client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ( - 'logging.googleapis.com:443' - ) + assert client.transport._host == ("logging.googleapis.com:443") -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.LoggingServiceV2GrpcTransport, "grpc"), - (transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_logging_service_v2_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.LoggingServiceV2GrpcTransport, "grpc"), + (transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), + 
], +) +def test_logging_service_v2_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=False) use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class,transport_name", [ - (LoggingServiceV2Client, "grpc"), - (LoggingServiceV2AsyncClient, "grpc_asyncio"), -]) -def test_logging_service_v2_client_from_service_account_file(client_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (LoggingServiceV2Client, "grpc"), + (LoggingServiceV2AsyncClient, "grpc_asyncio"), + ], +) +def test_logging_service_v2_client_from_service_account_file( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_json( + 
"dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ( - 'logging.googleapis.com:443' - ) + assert client.transport._host == ("logging.googleapis.com:443") def test_logging_service_v2_client_get_transport_class(): @@ -397,29 +578,44 @@ def test_logging_service_v2_client_get_transport_class(): assert transport == transports.LoggingServiceV2GrpcTransport -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), - (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(LoggingServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LoggingServiceV2Client)) -@mock.patch.object(LoggingServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LoggingServiceV2AsyncClient)) -def test_logging_service_v2_client_client_options(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), + ( + LoggingServiceV2AsyncClient, + transports.LoggingServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + LoggingServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2Client), +) +@mock.patch.object( + LoggingServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2AsyncClient), +) +def test_logging_service_v2_client_client_options( + client_class, transport_class, transport_name +): # Check that if channel is provided we won't create a new one. 
- with mock.patch.object(LoggingServiceV2Client, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) + with mock.patch.object(LoggingServiceV2Client, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. - with mock.patch.object(LoggingServiceV2Client, 'get_transport_class') as gtc: + with mock.patch.object(LoggingServiceV2Client, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() # Check the case api_endpoint is provided. options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( @@ -437,13 +633,15 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -455,7 +653,7 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( @@ -475,17 +673,22 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, 
transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -494,46 +697,90 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, api_audience=None, ) # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, - api_audience="https://language.googleapis.com" + api_audience="https://language.googleapis.com", ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc", "true"), - (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "true"), - (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc", "false"), - (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(LoggingServiceV2Client, 
"_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LoggingServiceV2Client)) -@mock.patch.object(LoggingServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LoggingServiceV2AsyncClient)) + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + LoggingServiceV2Client, + transports.LoggingServiceV2GrpcTransport, + "grpc", + "true", + ), + ( + LoggingServiceV2AsyncClient, + transports.LoggingServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + LoggingServiceV2Client, + transports.LoggingServiceV2GrpcTransport, + "grpc", + "false", + ), + ( + LoggingServiceV2AsyncClient, + transports.LoggingServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + LoggingServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2Client), +) +@mock.patch.object( + LoggingServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2AsyncClient), +) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_logging_service_v2_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): +def test_logging_service_v2_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. # Check the case client_cert_source is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -552,12 +799,22 @@ def test_logging_service_v2_client_mtls_env_auto(client_class, transport_class, # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -578,15 +835,22 @@ def test_logging_service_v2_client_mtls_env_auto(client_class, transport_class, ) # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -596,19 +860,31 @@ def test_logging_service_v2_client_mtls_env_auto(client_class, transport_class, ) -@pytest.mark.parametrize("client_class", [ - LoggingServiceV2Client, LoggingServiceV2AsyncClient -]) -@mock.patch.object(LoggingServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2Client)) -@mock.patch.object(LoggingServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2AsyncClient)) +@pytest.mark.parametrize( + "client_class", [LoggingServiceV2Client, LoggingServiceV2AsyncClient] +) +@mock.patch.object( + LoggingServiceV2Client, + "DEFAULT_ENDPOINT", + modify_default_endpoint(LoggingServiceV2Client), +) +@mock.patch.object( + LoggingServiceV2AsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(LoggingServiceV2AsyncClient), +) def test_logging_service_v2_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source == mock_client_cert_source @@ -616,18 +892,25 @@ def test_logging_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source is None # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): if hasattr(google.auth.transport.mtls, "should_use_client_cert"): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, ) api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( options @@ -664,23 +947,23 @@ def test_logging_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas env = os.environ.copy() env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) with mock.patch.dict(os.environ, env, clear=True): - config_filename = "mock_certificate_config.json" - config_file_content = json.dumps(config_data) - m = mock.mock_open(read_data=config_file_content) - with mock.patch("builtins.open", m): - with mock.patch.dict( - os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} - ): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, - api_endpoint=mock_api_endpoint, - ) - api_endpoint, cert_source = ( - client_class.get_mtls_endpoint_and_cert_source(options) - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is expected_cert_source + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert 
api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). test_cases = [ @@ -711,23 +994,23 @@ def test_logging_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas env = os.environ.copy() env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") with mock.patch.dict(os.environ, env, clear=True): - config_filename = "mock_certificate_config.json" - config_file_content = json.dumps(config_data) - m = mock.mock_open(read_data=config_file_content) - with mock.patch("builtins.open", m): - with mock.patch.dict( - os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} - ): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, - api_endpoint=mock_api_endpoint, - ) - api_endpoint, cert_source = ( - client_class.get_mtls_endpoint_and_cert_source(options) - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is expected_cert_source + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): @@ -743,16 +1026,27 @@ def test_logging_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_ENDPOINT assert cert_source is None # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source() + ) assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source @@ -762,27 +1056,50 @@ def test_logging_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas with pytest.raises(MutualTLSChannelError) as excinfo: client_class.get_mtls_endpoint_and_cert_source() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) -@pytest.mark.parametrize("client_class", [ - LoggingServiceV2Client, LoggingServiceV2AsyncClient -]) -@mock.patch.object(LoggingServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", 
modify_default_endpoint_template(LoggingServiceV2Client)) -@mock.patch.object(LoggingServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LoggingServiceV2AsyncClient)) + +@pytest.mark.parametrize( + "client_class", [LoggingServiceV2Client, LoggingServiceV2AsyncClient] +) +@mock.patch.object( + LoggingServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2Client), +) +@mock.patch.object( + LoggingServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2AsyncClient), +) def test_logging_service_v2_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" default_universe = LoggingServiceV2Client._DEFAULT_UNIVERSE - default_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", @@ -805,11 +1122,19 @@ def test_logging_service_v2_client_client_api_endpoint(client_class): universe_exists = hasattr(options, "universe_domain") if universe_exists: options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the 
_DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. @@ -817,26 +1142,39 @@ def test_logging_service_v2_client_client_api_endpoint(client_class): if hasattr(options, "universe_domain"): delattr(options, "universe_domain") with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) assert client.api_endpoint == default_endpoint -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), - (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_logging_service_v2_client_client_options_scopes(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), + ( + LoggingServiceV2AsyncClient, + transports.LoggingServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_logging_service_v2_client_client_options_scopes( + client_class, transport_class, transport_name +): # Check the case scopes are provided. 
options = client_options.ClientOptions( scopes=["1", "2"], ) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -845,23 +1183,39 @@ def test_logging_service_v2_client_client_options_scopes(client_class, transport api_audience=None, ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc", grpc_helpers), - (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_logging_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + LoggingServiceV2Client, + transports.LoggingServiceV2GrpcTransport, + "grpc", + grpc_helpers, + ), + ( + LoggingServiceV2AsyncClient, + transports.LoggingServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_logging_service_v2_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -870,11 +1224,14 @@ def test_logging_service_v2_client_client_options_credentials_file(client_class, api_audience=None, ) + def test_logging_service_v2_client_client_options_from_dict(): - with mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2GrpcTransport.__init__') as grpc_transport: + with mock.patch( + "google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2GrpcTransport.__init__" + ) as grpc_transport: grpc_transport.return_value = None client = LoggingServiceV2Client( - client_options={'api_endpoint': 'squid.clam.whelk'} + client_options={"api_endpoint": "squid.clam.whelk"} ) grpc_transport.assert_called_once_with( credentials=None, @@ -889,23 +1246,38 @@ def test_logging_service_v2_client_client_options_from_dict(): ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc", grpc_helpers), - (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_logging_service_v2_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): 
+@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + LoggingServiceV2Client, + transports.LoggingServiceV2GrpcTransport, + "grpc", + grpc_helpers, + ), + ( + LoggingServiceV2AsyncClient, + transports.LoggingServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_logging_service_v2_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -915,13 +1287,13 @@ def test_logging_service_v2_client_create_channel_credentials_file(client_class, ) # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch.object(grpc_helpers, "create_channel") as create_channel, + ): creds = ga_credentials.AnonymousCredentials() file_creds = ga_credentials.AnonymousCredentials() load_creds.return_value = (file_creds, None) @@ -933,12 +1305,12 @@ def test_logging_service_v2_client_create_channel_credentials_file(client_class, credentials_file=None, quota_project_id=None, default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', -), + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), scopes=None, default_host="logging.googleapis.com", ssl_credentials=None, @@ -949,11 +1321,14 @@ def test_logging_service_v2_client_create_channel_credentials_file(client_class, ) -@pytest.mark.parametrize("request_type", [ - logging.DeleteLogRequest, - dict, -]) -def test_delete_log(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging.DeleteLogRequest, + dict, + ], +) +def test_delete_log(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -964,9 +1339,7 @@ def test_delete_log(request_type, 
transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_log), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None response = client.delete_log(request) @@ -986,28 +1359,29 @@ def test_delete_log_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging.DeleteLogRequest( - log_name='log_name_value', + log_name="log_name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.delete_log), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.delete_log(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging.DeleteLogRequest( - log_name='log_name_value', + log_name="log_name_value", ) + def test_delete_log_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1026,7 +1400,9 @@ def test_delete_log_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.delete_log] = mock_rpc request = {} client.delete_log(request) @@ -1040,6 +1416,7 @@ def test_delete_log_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_delete_log_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1055,12 +1432,17 @@ async def test_delete_log_async_use_cached_wrapped_rpc(transport: str = "grpc_as wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.delete_log in client._client._transport._wrapped_methods + assert ( + client._client._transport.delete_log + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_log] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.delete_log + ] = mock_rpc request = {} await client.delete_log(request) @@ -1074,8 +1456,11 @@ async def test_delete_log_async_use_cached_wrapped_rpc(transport: str = "grpc_as assert wrapper_fn.call_count == 0 
assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_log_async(transport: str = 'grpc_asyncio', request_type=logging.DeleteLogRequest): +async def test_delete_log_async( + transport: str = "grpc_asyncio", request_type=logging.DeleteLogRequest +): client = LoggingServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -1086,9 +1471,7 @@ async def test_delete_log_async(transport: str = 'grpc_asyncio', request_type=lo request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_log), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.delete_log(request) @@ -1107,6 +1490,7 @@ async def test_delete_log_async(transport: str = 'grpc_asyncio', request_type=lo async def test_delete_log_async_from_dict(): await test_delete_log_async(request_type=dict) + def test_delete_log_field_headers(): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1116,12 +1500,10 @@ def test_delete_log_field_headers(): # a field header. Set these to a non-empty value. request = logging.DeleteLogRequest() - request.log_name = 'log_name_value' + request.log_name = "log_name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_log), "__call__") as call: call.return_value = None client.delete_log(request) @@ -1133,9 +1515,9 @@ def test_delete_log_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'log_name=log_name_value', - ) in kw['metadata'] + "x-goog-request-params", + "log_name=log_name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1148,12 +1530,10 @@ async def test_delete_log_field_headers_async(): # a field header. Set these to a non-empty value. request = logging.DeleteLogRequest() - request.log_name = 'log_name_value' + request.log_name = "log_name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_log), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_log(request) @@ -1165,9 +1545,9 @@ async def test_delete_log_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'log_name=log_name_value', - ) in kw['metadata'] + "x-goog-request-params", + "log_name=log_name_value", + ) in kw["metadata"] def test_delete_log_flattened(): @@ -1176,15 +1556,13 @@ def test_delete_log_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_log), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.delete_log( - log_name='log_name_value', + log_name="log_name_value", ) # Establish that the underlying call was made with the expected @@ -1192,7 +1570,7 @@ def test_delete_log_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].log_name - mock_val = 'log_name_value' + mock_val = "log_name_value" assert arg == mock_val @@ -1206,9 +1584,10 @@ def test_delete_log_flattened_error(): with pytest.raises(ValueError): client.delete_log( logging.DeleteLogRequest(), - log_name='log_name_value', + log_name="log_name_value", ) + @pytest.mark.asyncio async def test_delete_log_flattened_async(): client = LoggingServiceV2AsyncClient( @@ -1216,9 +1595,7 @@ async def test_delete_log_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_log), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -1226,7 +1603,7 @@ async def test_delete_log_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.delete_log( - log_name='log_name_value', + log_name="log_name_value", ) # Establish that the underlying call was made with the expected @@ -1234,9 +1611,10 @@ async def test_delete_log_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].log_name - mock_val = 'log_name_value' + mock_val = "log_name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_delete_log_flattened_error_async(): client = LoggingServiceV2AsyncClient( @@ -1248,15 +1626,18 @@ async def test_delete_log_flattened_error_async(): with pytest.raises(ValueError): await client.delete_log( logging.DeleteLogRequest(), - log_name='log_name_value', + log_name="log_name_value", ) -@pytest.mark.parametrize("request_type", [ - logging.WriteLogEntriesRequest, - dict, -]) -def test_write_log_entries(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging.WriteLogEntriesRequest, + dict, + ], +) +def test_write_log_entries(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1268,11 +1649,10 @@ def test_write_log_entries(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.write_log_entries), - '__call__') as call: + type(client.transport.write_log_entries), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = logging.WriteLogEntriesResponse( - ) + call.return_value = logging.WriteLogEntriesResponse() response = client.write_log_entries(request) # Establish that the underlying gRPC stub method was called. @@ -1290,28 +1670,31 @@ def test_write_log_entries_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. 
client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging.WriteLogEntriesRequest( - log_name='log_name_value', + log_name="log_name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.write_log_entries), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.write_log_entries), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.write_log_entries(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging.WriteLogEntriesRequest( - log_name='log_name_value', + log_name="log_name_value", ) + def test_write_log_entries_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1330,8 +1713,12 @@ def test_write_log_entries_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.write_log_entries] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.write_log_entries] = ( + mock_rpc + ) request = {} client.write_log_entries(request) @@ -1344,8 +1731,11 @@ def test_write_log_entries_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_write_log_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_write_log_entries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1359,12 +1749,17 @@ async def test_write_log_entries_async_use_cached_wrapped_rpc(transport: str = " wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.write_log_entries in client._client._transport._wrapped_methods + assert ( + client._client._transport.write_log_entries + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.write_log_entries] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.write_log_entries + ] = mock_rpc request = {} await client.write_log_entries(request) @@ -1378,8 +1773,11 @@ async def test_write_log_entries_async_use_cached_wrapped_rpc(transport: str = " assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_write_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.WriteLogEntriesRequest): +async def test_write_log_entries_async( + transport: str = "grpc_asyncio", request_type=logging.WriteLogEntriesRequest +): client = LoggingServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -1391,11 
+1789,12 @@ async def test_write_log_entries_async(transport: str = 'grpc_asyncio', request_ # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.write_log_entries), - '__call__') as call: + type(client.transport.write_log_entries), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging.WriteLogEntriesResponse( - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.WriteLogEntriesResponse() + ) response = await client.write_log_entries(request) # Establish that the underlying gRPC stub method was called. @@ -1420,17 +1819,17 @@ def test_write_log_entries_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.write_log_entries), - '__call__') as call: + type(client.transport.write_log_entries), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = logging.WriteLogEntriesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.write_log_entries( - log_name='log_name_value', - resource=monitored_resource_pb2.MonitoredResource(type='type_value'), - labels={'key_value': 'value_value'}, - entries=[log_entry.LogEntry(log_name='log_name_value')], + log_name="log_name_value", + resource=monitored_resource_pb2.MonitoredResource(type="type_value"), + labels={"key_value": "value_value"}, + entries=[log_entry.LogEntry(log_name="log_name_value")], ) # Establish that the underlying call was made with the expected @@ -1438,16 +1837,16 @@ def test_write_log_entries_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].log_name - mock_val = 'log_name_value' + mock_val = "log_name_value" assert arg == mock_val arg = args[0].resource - mock_val = monitored_resource_pb2.MonitoredResource(type='type_value') + mock_val = monitored_resource_pb2.MonitoredResource(type="type_value") assert arg == mock_val arg = args[0].labels - mock_val = {'key_value': 'value_value'} + mock_val = {"key_value": "value_value"} assert arg == mock_val arg = args[0].entries - mock_val = [log_entry.LogEntry(log_name='log_name_value')] + mock_val = [log_entry.LogEntry(log_name="log_name_value")] assert arg == mock_val @@ -1461,12 +1860,13 @@ def test_write_log_entries_flattened_error(): with pytest.raises(ValueError): client.write_log_entries( logging.WriteLogEntriesRequest(), - log_name='log_name_value', - resource=monitored_resource_pb2.MonitoredResource(type='type_value'), - labels={'key_value': 'value_value'}, - entries=[log_entry.LogEntry(log_name='log_name_value')], + log_name="log_name_value", + resource=monitored_resource_pb2.MonitoredResource(type="type_value"), + labels={"key_value": "value_value"}, + entries=[log_entry.LogEntry(log_name="log_name_value")], ) + @pytest.mark.asyncio async def test_write_log_entries_flattened_async(): client = LoggingServiceV2AsyncClient( @@ -1475,19 +1875,21 @@ async def test_write_log_entries_flattened_async(): # Mock the actual call within the 
gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.write_log_entries), - '__call__') as call: + type(client.transport.write_log_entries), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = logging.WriteLogEntriesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.WriteLogEntriesResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.WriteLogEntriesResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.write_log_entries( - log_name='log_name_value', - resource=monitored_resource_pb2.MonitoredResource(type='type_value'), - labels={'key_value': 'value_value'}, - entries=[log_entry.LogEntry(log_name='log_name_value')], + log_name="log_name_value", + resource=monitored_resource_pb2.MonitoredResource(type="type_value"), + labels={"key_value": "value_value"}, + entries=[log_entry.LogEntry(log_name="log_name_value")], ) # Establish that the underlying call was made with the expected @@ -1495,18 +1897,19 @@ async def test_write_log_entries_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].log_name - mock_val = 'log_name_value' + mock_val = "log_name_value" assert arg == mock_val arg = args[0].resource - mock_val = monitored_resource_pb2.MonitoredResource(type='type_value') + mock_val = monitored_resource_pb2.MonitoredResource(type="type_value") assert arg == mock_val arg = args[0].labels - mock_val = {'key_value': 'value_value'} + mock_val = {"key_value": "value_value"} assert arg == mock_val arg = args[0].entries - mock_val = [log_entry.LogEntry(log_name='log_name_value')] + mock_val = [log_entry.LogEntry(log_name="log_name_value")] assert arg == mock_val + @pytest.mark.asyncio async def test_write_log_entries_flattened_error_async(): client = LoggingServiceV2AsyncClient( @@ -1518,18 +1921,21 @@ async def 
test_write_log_entries_flattened_error_async(): with pytest.raises(ValueError): await client.write_log_entries( logging.WriteLogEntriesRequest(), - log_name='log_name_value', - resource=monitored_resource_pb2.MonitoredResource(type='type_value'), - labels={'key_value': 'value_value'}, - entries=[log_entry.LogEntry(log_name='log_name_value')], + log_name="log_name_value", + resource=monitored_resource_pb2.MonitoredResource(type="type_value"), + labels={"key_value": "value_value"}, + entries=[log_entry.LogEntry(log_name="log_name_value")], ) -@pytest.mark.parametrize("request_type", [ - logging.ListLogEntriesRequest, - dict, -]) -def test_list_log_entries(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging.ListLogEntriesRequest, + dict, + ], +) +def test_list_log_entries(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1540,12 +1946,10 @@ def test_list_log_entries(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging.ListLogEntriesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.list_log_entries(request) @@ -1557,7 +1961,7 @@ def test_list_log_entries(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListLogEntriesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_list_log_entries_non_empty_request_with_auto_populated_field(): @@ -1565,32 +1969,33 @@ def test_list_log_entries_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging.ListLogEntriesRequest( - filter='filter_value', - order_by='order_by_value', - page_token='page_token_value', + filter="filter_value", + order_by="order_by_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_log_entries(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging.ListLogEntriesRequest( - filter='filter_value', - order_by='order_by_value', - page_token='page_token_value', + filter="filter_value", + order_by="order_by_value", + page_token="page_token_value", ) + def test_list_log_entries_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1609,8 +2014,12 @@ def test_list_log_entries_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_log_entries] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_log_entries] = ( + mock_rpc + ) request = {} client.list_log_entries(request) @@ -1623,8 +2032,11 @@ def test_list_log_entries_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_log_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_log_entries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1638,12 +2050,17 @@ async def test_list_log_entries_async_use_cached_wrapped_rpc(transport: str = "g wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_log_entries in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_log_entries + in 
client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_log_entries] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_log_entries + ] = mock_rpc request = {} await client.list_log_entries(request) @@ -1657,8 +2074,11 @@ async def test_list_log_entries_async_use_cached_wrapped_rpc(transport: str = "g assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.ListLogEntriesRequest): +async def test_list_log_entries_async( + transport: str = "grpc_asyncio", request_type=logging.ListLogEntriesRequest +): client = LoggingServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -1669,13 +2089,13 @@ async def test_list_log_entries_async(transport: str = 'grpc_asyncio', request_t request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogEntriesResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListLogEntriesResponse( + next_page_token="next_page_token_value", + ) + ) response = await client.list_log_entries(request) # Establish that the underlying gRPC stub method was called. @@ -1686,7 +2106,7 @@ async def test_list_log_entries_async(transport: str = 'grpc_asyncio', request_t # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListLogEntriesAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio @@ -1700,17 +2120,15 @@ def test_list_log_entries_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging.ListLogEntriesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_log_entries( - resource_names=['resource_names_value'], - filter='filter_value', - order_by='order_by_value', + resource_names=["resource_names_value"], + filter="filter_value", + order_by="order_by_value", ) # Establish that the underlying call was made with the expected @@ -1718,13 +2136,13 @@ def test_list_log_entries_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].resource_names - mock_val = ['resource_names_value'] + mock_val = ["resource_names_value"] assert arg == mock_val arg = args[0].filter - mock_val = 'filter_value' + mock_val = "filter_value" assert arg == mock_val arg = args[0].order_by - mock_val = 'order_by_value' + mock_val = "order_by_value" assert arg == mock_val @@ -1738,11 +2156,12 @@ def test_list_log_entries_flattened_error(): with pytest.raises(ValueError): client.list_log_entries( logging.ListLogEntriesRequest(), - resource_names=['resource_names_value'], - filter='filter_value', - order_by='order_by_value', + resource_names=["resource_names_value"], + filter="filter_value", + order_by="order_by_value", ) + @pytest.mark.asyncio async def test_list_log_entries_flattened_async(): client = LoggingServiceV2AsyncClient( @@ -1750,19 +2169,19 @@ async def 
test_list_log_entries_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging.ListLogEntriesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogEntriesResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListLogEntriesResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_log_entries( - resource_names=['resource_names_value'], - filter='filter_value', - order_by='order_by_value', + resource_names=["resource_names_value"], + filter="filter_value", + order_by="order_by_value", ) # Establish that the underlying call was made with the expected @@ -1770,15 +2189,16 @@ async def test_list_log_entries_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].resource_names - mock_val = ['resource_names_value'] + mock_val = ["resource_names_value"] assert arg == mock_val arg = args[0].filter - mock_val = 'filter_value' + mock_val = "filter_value" assert arg == mock_val arg = args[0].order_by - mock_val = 'order_by_value' + mock_val = "order_by_value" assert arg == mock_val + @pytest.mark.asyncio async def test_list_log_entries_flattened_error_async(): client = LoggingServiceV2AsyncClient( @@ -1790,9 +2210,9 @@ async def test_list_log_entries_flattened_error_async(): with pytest.raises(ValueError): await client.list_log_entries( logging.ListLogEntriesRequest(), - resource_names=['resource_names_value'], - filter='filter_value', - order_by='order_by_value', + resource_names=["resource_names_value"], + filter="filter_value", + order_by="order_by_value", ) @@ -1803,9 +2223,7 @@ def 
test_list_log_entries_pager(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( logging.ListLogEntriesResponse( @@ -1814,17 +2232,17 @@ def test_list_log_entries_pager(transport_name: str = "grpc"): log_entry.LogEntry(), log_entry.LogEntry(), ], - next_page_token='abc', + next_page_token="abc", ), logging.ListLogEntriesResponse( entries=[], - next_page_token='def', + next_page_token="def", ), logging.ListLogEntriesResponse( entries=[ log_entry.LogEntry(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging.ListLogEntriesResponse( entries=[ @@ -1846,8 +2264,9 @@ def test_list_log_entries_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, log_entry.LogEntry) - for i in results) + assert all(isinstance(i, log_entry.LogEntry) for i in results) + + def test_list_log_entries_pages(transport_name: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1855,9 +2274,7 @@ def test_list_log_entries_pages(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( logging.ListLogEntriesResponse( @@ -1866,17 +2283,17 @@ def test_list_log_entries_pages(transport_name: str = "grpc"): log_entry.LogEntry(), log_entry.LogEntry(), ], - next_page_token='abc', + next_page_token="abc", ), logging.ListLogEntriesResponse( entries=[], - next_page_token='def', + next_page_token="def", ), logging.ListLogEntriesResponse( entries=[ log_entry.LogEntry(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging.ListLogEntriesResponse( entries=[ @@ -1887,9 +2304,10 @@ def test_list_log_entries_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.list_log_entries(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_list_log_entries_async_pager(): client = LoggingServiceV2AsyncClient( @@ -1898,8 +2316,8 @@ async def test_list_log_entries_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_log_entries), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_log_entries), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging.ListLogEntriesResponse( @@ -1908,17 +2326,17 @@ async def test_list_log_entries_async_pager(): log_entry.LogEntry(), log_entry.LogEntry(), ], - next_page_token='abc', + next_page_token="abc", ), logging.ListLogEntriesResponse( entries=[], - next_page_token='def', + next_page_token="def", ), logging.ListLogEntriesResponse( entries=[ log_entry.LogEntry(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging.ListLogEntriesResponse( entries=[ @@ -1928,15 +2346,16 @@ async def test_list_log_entries_async_pager(): ), RuntimeError, ) - async_pager = await client.list_log_entries(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.list_log_entries( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, log_entry.LogEntry) - for i in responses) + assert all(isinstance(i, log_entry.LogEntry) for i in responses) @pytest.mark.asyncio @@ -1947,8 +2366,8 @@ async def test_list_log_entries_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_log_entries), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_log_entries), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging.ListLogEntriesResponse( @@ -1957,17 +2376,17 @@ async def test_list_log_entries_async_pages(): log_entry.LogEntry(), log_entry.LogEntry(), ], - next_page_token='abc', + next_page_token="abc", ), logging.ListLogEntriesResponse( entries=[], - next_page_token='def', + next_page_token="def", ), logging.ListLogEntriesResponse( entries=[ log_entry.LogEntry(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging.ListLogEntriesResponse( entries=[ @@ -1980,18 +2399,22 @@ async def test_list_log_entries_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.list_log_entries(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - logging.ListMonitoredResourceDescriptorsRequest, - dict, -]) -def test_list_monitored_resource_descriptors(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + logging.ListMonitoredResourceDescriptorsRequest, + dict, + ], +) +def test_list_monitored_resource_descriptors(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2003,11 +2426,11 @@ def test_list_monitored_resource_descriptors(request_type, transport: str = 'grp # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: + type(client.transport.list_monitored_resource_descriptors), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = logging.ListMonitoredResourceDescriptorsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.list_monitored_resource_descriptors(request) @@ -2019,7 +2442,7 @@ def test_list_monitored_resource_descriptors(request_type, transport: str = 'grp # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListMonitoredResourceDescriptorsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_list_monitored_resource_descriptors_non_empty_request_with_auto_populated_field(): @@ -2027,28 +2450,31 @@ def test_list_monitored_resource_descriptors_non_empty_request_with_auto_populat # automatically populated, according to AIP-4235, with non-empty requests. client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging.ListMonitoredResourceDescriptorsRequest( - page_token='page_token_value', + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.list_monitored_resource_descriptors), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_monitored_resource_descriptors(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging.ListMonitoredResourceDescriptorsRequest( - page_token='page_token_value', + page_token="page_token_value", ) + def test_list_monitored_resource_descriptors_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2063,12 +2489,19 @@ def test_list_monitored_resource_descriptors_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_monitored_resource_descriptors in client._transport._wrapped_methods + assert ( + client._transport.list_monitored_resource_descriptors + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_monitored_resource_descriptors] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_monitored_resource_descriptors + ] = mock_rpc request = {} client.list_monitored_resource_descriptors(request) @@ -2081,8 +2514,11 @@ def test_list_monitored_resource_descriptors_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_monitored_resource_descriptors_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_monitored_resource_descriptors_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2096,12 +2532,17 @@ async def test_list_monitored_resource_descriptors_async_use_cached_wrapped_rpc( wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_monitored_resource_descriptors in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_monitored_resource_descriptors + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_monitored_resource_descriptors] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_monitored_resource_descriptors + ] = mock_rpc request = {} await client.list_monitored_resource_descriptors(request) @@ -2115,8 +2556,12 @@ async def test_list_monitored_resource_descriptors_async_use_cached_wrapped_rpc( assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_monitored_resource_descriptors_async(transport: str = 'grpc_asyncio', request_type=logging.ListMonitoredResourceDescriptorsRequest): +async def 
test_list_monitored_resource_descriptors_async( + transport: str = "grpc_asyncio", + request_type=logging.ListMonitoredResourceDescriptorsRequest, +): client = LoggingServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2128,12 +2573,14 @@ async def test_list_monitored_resource_descriptors_async(transport: str = 'grpc_ # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: + type(client.transport.list_monitored_resource_descriptors), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging.ListMonitoredResourceDescriptorsResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListMonitoredResourceDescriptorsResponse( + next_page_token="next_page_token_value", + ) + ) response = await client.list_monitored_resource_descriptors(request) # Establish that the underlying gRPC stub method was called. @@ -2144,7 +2591,7 @@ async def test_list_monitored_resource_descriptors_async(transport: str = 'grpc_ # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListMonitoredResourceDescriptorsAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio @@ -2160,8 +2607,8 @@ def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc") # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: + type(client.transport.list_monitored_resource_descriptors), "__call__" + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging.ListMonitoredResourceDescriptorsResponse( @@ -2170,17 +2617,17 @@ def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc") monitored_resource_pb2.MonitoredResourceDescriptor(), monitored_resource_pb2.MonitoredResourceDescriptor(), ], - next_page_token='abc', + next_page_token="abc", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[], - next_page_token='def', + next_page_token="def", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ monitored_resource_pb2.MonitoredResourceDescriptor(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ @@ -2194,7 +2641,9 @@ def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc") expected_metadata = () retry = retries.Retry() timeout = 5 - pager = client.list_monitored_resource_descriptors(request={}, retry=retry, timeout=timeout) + pager = client.list_monitored_resource_descriptors( + request={}, retry=retry, timeout=timeout + ) assert pager._metadata == expected_metadata assert pager._retry == retry @@ -2202,8 +2651,12 @@ def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc") results = list(pager) assert len(results) == 6 - assert all(isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) - for i in results) + assert all( + isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) + for i in results + ) + + def test_list_monitored_resource_descriptors_pages(transport_name: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2212,8 +2665,8 @@ def test_list_monitored_resource_descriptors_pages(transport_name: str = "grpc") # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: + type(client.transport.list_monitored_resource_descriptors), "__call__" + ) as call: # Set the response to a series of pages. call.side_effect = ( logging.ListMonitoredResourceDescriptorsResponse( @@ -2222,17 +2675,17 @@ def test_list_monitored_resource_descriptors_pages(transport_name: str = "grpc") monitored_resource_pb2.MonitoredResourceDescriptor(), monitored_resource_pb2.MonitoredResourceDescriptor(), ], - next_page_token='abc', + next_page_token="abc", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[], - next_page_token='def', + next_page_token="def", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ monitored_resource_pb2.MonitoredResourceDescriptor(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ @@ -2243,9 +2696,10 @@ def test_list_monitored_resource_descriptors_pages(transport_name: str = "grpc") RuntimeError, ) pages = list(client.list_monitored_resource_descriptors(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async_pager(): client = LoggingServiceV2AsyncClient( @@ -2254,8 +2708,10 @@ async def test_list_monitored_resource_descriptors_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_monitored_resource_descriptors), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging.ListMonitoredResourceDescriptorsResponse( @@ -2264,17 +2720,17 @@ async def test_list_monitored_resource_descriptors_async_pager(): monitored_resource_pb2.MonitoredResourceDescriptor(), monitored_resource_pb2.MonitoredResourceDescriptor(), ], - next_page_token='abc', + next_page_token="abc", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[], - next_page_token='def', + next_page_token="def", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ monitored_resource_pb2.MonitoredResourceDescriptor(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ @@ -2284,15 +2740,19 @@ async def test_list_monitored_resource_descriptors_async_pager(): ), RuntimeError, ) - async_pager = await client.list_monitored_resource_descriptors(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.list_monitored_resource_descriptors( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) - for i in responses) + assert all( + isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) + for i in responses + ) @pytest.mark.asyncio @@ -2303,8 +2763,10 @@ async def test_list_monitored_resource_descriptors_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_monitored_resource_descriptors), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging.ListMonitoredResourceDescriptorsResponse( @@ -2313,17 +2775,17 @@ async def test_list_monitored_resource_descriptors_async_pages(): monitored_resource_pb2.MonitoredResourceDescriptor(), monitored_resource_pb2.MonitoredResourceDescriptor(), ], - next_page_token='abc', + next_page_token="abc", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[], - next_page_token='def', + next_page_token="def", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ monitored_resource_pb2.MonitoredResourceDescriptor(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ @@ -2336,18 +2798,22 @@ async def test_list_monitored_resource_descriptors_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.list_monitored_resource_descriptors(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - logging.ListLogsRequest, - dict, -]) -def test_list_logs(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + logging.ListLogsRequest, + dict, + ], +) +def test_list_logs(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2358,13 +2824,11 @@ def test_list_logs(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging.ListLogsResponse( - log_names=['log_names_value'], - next_page_token='next_page_token_value', + log_names=["log_names_value"], + next_page_token="next_page_token_value", ) response = client.list_logs(request) @@ -2376,8 +2840,8 @@ def test_list_logs(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogsPager) - assert response.log_names == ['log_names_value'] - assert response.next_page_token == 'next_page_token_value' + assert response.log_names == ["log_names_value"] + assert response.next_page_token == "next_page_token_value" def test_list_logs_non_empty_request_with_auto_populated_field(): @@ -2385,30 +2849,31 @@ def test_list_logs_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging.ListLogsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_logs(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging.ListLogsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) + def test_list_logs_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2427,7 +2892,9 @@ def test_list_logs_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.list_logs] = mock_rpc request = {} client.list_logs(request) @@ -2441,6 +2908,7 @@ def test_list_logs_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_list_logs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2456,12 +2924,17 @@ async def test_list_logs_async_use_cached_wrapped_rpc(transport: str = "grpc_asy wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_logs in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_logs + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_logs] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_logs + ] = mock_rpc request = {} await client.list_logs(request) @@ -2475,8 +2948,11 @@ async def test_list_logs_async_use_cached_wrapped_rpc(transport: str = 
"grpc_asy assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_logs_async(transport: str = 'grpc_asyncio', request_type=logging.ListLogsRequest): +async def test_list_logs_async( + transport: str = "grpc_asyncio", request_type=logging.ListLogsRequest +): client = LoggingServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2487,14 +2963,14 @@ async def test_list_logs_async(transport: str = 'grpc_asyncio', request_type=log request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogsResponse( - log_names=['log_names_value'], - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListLogsResponse( + log_names=["log_names_value"], + next_page_token="next_page_token_value", + ) + ) response = await client.list_logs(request) # Establish that the underlying gRPC stub method was called. @@ -2505,14 +2981,15 @@ async def test_list_logs_async(transport: str = 'grpc_asyncio', request_type=log # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListLogsAsyncPager) - assert response.log_names == ['log_names_value'] - assert response.next_page_token == 'next_page_token_value' + assert response.log_names == ["log_names_value"] + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_list_logs_async_from_dict(): await test_list_logs_async(request_type=dict) + def test_list_logs_field_headers(): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2522,12 +2999,10 @@ def test_list_logs_field_headers(): # a field header. Set these to a non-empty value. request = logging.ListLogsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: call.return_value = logging.ListLogsResponse() client.list_logs(request) @@ -2539,9 +3014,9 @@ def test_list_logs_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2554,13 +3029,13 @@ async def test_list_logs_field_headers_async(): # a field header. Set these to a non-empty value. request = logging.ListLogsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogsResponse()) + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListLogsResponse() + ) await client.list_logs(request) # Establish that the underlying gRPC stub method was called. @@ -2571,9 +3046,9 @@ async def test_list_logs_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_list_logs_flattened(): @@ -2582,15 +3057,13 @@ def test_list_logs_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging.ListLogsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.list_logs( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -2598,7 +3071,7 @@ def test_list_logs_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -2612,9 +3085,10 @@ def test_list_logs_flattened_error(): with pytest.raises(ValueError): client.list_logs( logging.ListLogsRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test_list_logs_flattened_async(): client = LoggingServiceV2AsyncClient( @@ -2622,17 +3096,17 @@ async def test_list_logs_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging.ListLogsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogsResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListLogsResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.list_logs( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -2640,9 +3114,10 @@ async def test_list_logs_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test_list_logs_flattened_error_async(): client = LoggingServiceV2AsyncClient( @@ -2654,7 +3129,7 @@ async def test_list_logs_flattened_error_async(): with pytest.raises(ValueError): await client.list_logs( logging.ListLogsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -2665,9 +3140,7 @@ def test_list_logs_pager(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( logging.ListLogsResponse( @@ -2676,17 +3149,17 @@ def test_list_logs_pager(transport_name: str = "grpc"): str(), str(), ], - next_page_token='abc', + next_page_token="abc", ), logging.ListLogsResponse( log_names=[], - next_page_token='def', + next_page_token="def", ), logging.ListLogsResponse( log_names=[ str(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging.ListLogsResponse( log_names=[ @@ -2701,9 +3174,7 @@ def test_list_logs_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_logs(request={}, retry=retry, timeout=timeout) @@ -2713,8 +3184,9 @@ def test_list_logs_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, str) - for i in results) + assert all(isinstance(i, str) for i in results) + + def test_list_logs_pages(transport_name: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2722,9 +3194,7 @@ def test_list_logs_pages(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( logging.ListLogsResponse( @@ -2733,17 +3203,17 @@ def test_list_logs_pages(transport_name: str = "grpc"): str(), str(), ], - next_page_token='abc', + next_page_token="abc", ), logging.ListLogsResponse( log_names=[], - next_page_token='def', + next_page_token="def", ), logging.ListLogsResponse( log_names=[ str(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging.ListLogsResponse( log_names=[ @@ -2754,9 +3224,10 @@ def test_list_logs_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.list_logs(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_list_logs_async_pager(): client = LoggingServiceV2AsyncClient( @@ -2765,8 +3236,8 @@ async def test_list_logs_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_logs), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_logs), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging.ListLogsResponse( @@ -2775,17 +3246,17 @@ async def test_list_logs_async_pager(): str(), str(), ], - next_page_token='abc', + next_page_token="abc", ), logging.ListLogsResponse( log_names=[], - next_page_token='def', + next_page_token="def", ), logging.ListLogsResponse( log_names=[ str(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging.ListLogsResponse( log_names=[ @@ -2795,15 +3266,16 @@ async def test_list_logs_async_pager(): ), RuntimeError, ) - async_pager = await client.list_logs(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.list_logs( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, str) - for i in responses) + assert all(isinstance(i, str) for i in responses) @pytest.mark.asyncio @@ -2814,8 +3286,8 @@ async def test_list_logs_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_logs), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_logs), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging.ListLogsResponse( @@ -2824,17 +3296,17 @@ async def test_list_logs_async_pages(): str(), str(), ], - next_page_token='abc', + next_page_token="abc", ), logging.ListLogsResponse( log_names=[], - next_page_token='def', + next_page_token="def", ), logging.ListLogsResponse( log_names=[ str(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging.ListLogsResponse( log_names=[ @@ -2847,18 +3319,22 @@ async def test_list_logs_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.list_logs(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - logging.TailLogEntriesRequest, - dict, -]) -def test_tail_log_entries(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + logging.TailLogEntriesRequest, + dict, + ], +) +def test_tail_log_entries(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2870,9 +3346,7 @@ def test_tail_log_entries(request_type, transport: str = 'grpc'): requests = [request] # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.tail_log_entries), - '__call__') as call: + with mock.patch.object(type(client.transport.tail_log_entries), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = iter([logging.TailLogEntriesResponse()]) response = client.tail_log_entries(iter(requests)) @@ -2905,8 +3379,12 @@ def test_tail_log_entries_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.tail_log_entries] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.tail_log_entries] = ( + mock_rpc + ) request = [{}] client.tail_log_entries(request) @@ -2919,8 +3397,11 @@ def test_tail_log_entries_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_tail_log_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_tail_log_entries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2934,12 +3415,17 @@ async def test_tail_log_entries_async_use_cached_wrapped_rpc(transport: str = "g wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.tail_log_entries in client._client._transport._wrapped_methods + assert ( + client._client._transport.tail_log_entries + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.tail_log_entries] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.tail_log_entries + ] = mock_rpc request = [{}] await client.tail_log_entries(request) @@ 
-2953,8 +3439,11 @@ async def test_tail_log_entries_async_use_cached_wrapped_rpc(transport: str = "g assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_tail_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.TailLogEntriesRequest): +async def test_tail_log_entries_async( + transport: str = "grpc_asyncio", request_type=logging.TailLogEntriesRequest +): client = LoggingServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2966,12 +3455,12 @@ async def test_tail_log_entries_async(transport: str = 'grpc_asyncio', request_t requests = [request] # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.tail_log_entries), - '__call__') as call: + with mock.patch.object(type(client.transport.tail_log_entries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[logging.TailLogEntriesResponse()]) + call.return_value.read = mock.AsyncMock( + side_effect=[logging.TailLogEntriesResponse()] + ) response = await client.tail_log_entries(iter(requests)) # Establish that the underlying gRPC stub method was called. @@ -3027,8 +3516,7 @@ def test_credentials_transport_error(): options.api_key = "api_key" with pytest.raises(ValueError): client = LoggingServiceV2Client( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() + client_options=options, credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. @@ -3050,6 +3538,7 @@ def test_transport_instance(): client = LoggingServiceV2Client(transport=transport) assert client.transport is transport + def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.LoggingServiceV2GrpcTransport( @@ -3064,17 +3553,22 @@ def test_transport_get_channel(): channel = transport.grpc_channel assert channel -@pytest.mark.parametrize("transport_class", [ - transports.LoggingServiceV2GrpcTransport, - transports.LoggingServiceV2GrpcAsyncIOTransport, -]) + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LoggingServiceV2GrpcTransport, + transports.LoggingServiceV2GrpcAsyncIOTransport, + ], +) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() + def test_transport_kind_grpc(): transport = LoggingServiceV2Client.get_transport_class("grpc")( credentials=ga_credentials.AnonymousCredentials() @@ -3084,8 +3578,7 @@ def test_transport_kind_grpc(): def test_initialize_client_w_grpc(): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" ) assert client is not None @@ -3099,9 +3592,7 @@ def test_delete_log_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_log), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_log), "__call__") as call: call.return_value = None client.delete_log(request=None) @@ -3123,8 +3614,8 @@ def test_write_log_entries_empty_call_grpc(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.write_log_entries), - '__call__') as call: + type(client.transport.write_log_entries), "__call__" + ) as call: call.return_value = logging.WriteLogEntriesResponse() client.write_log_entries(request=None) @@ -3145,9 +3636,7 @@ def test_list_log_entries_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: call.return_value = logging.ListLogEntriesResponse() client.list_log_entries(request=None) @@ -3169,8 +3658,8 @@ def test_list_monitored_resource_descriptors_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: + type(client.transport.list_monitored_resource_descriptors), "__call__" + ) as call: call.return_value = logging.ListMonitoredResourceDescriptorsResponse() client.list_monitored_resource_descriptors(request=None) @@ -3191,9 +3680,7 @@ def test_list_logs_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: call.return_value = logging.ListLogsResponse() client.list_logs(request=None) @@ -3214,8 +3701,7 @@ def test_transport_kind_grpc_asyncio(): def test_initialize_client_w_grpc_asyncio(): client = LoggingServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" + credentials=async_anonymous_credentials(), transport="grpc_asyncio" ) assert client is not None @@ -3230,9 +3716,7 @@ async def test_delete_log_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_log), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_log), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_log(request=None) @@ -3256,11 +3740,12 @@ async def test_write_log_entries_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.write_log_entries), - '__call__') as call: + type(client.transport.write_log_entries), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.WriteLogEntriesResponse( - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.WriteLogEntriesResponse() + ) await client.write_log_entries(request=None) # Establish that the underlying stub method was called. @@ -3281,13 +3766,13 @@ async def test_list_log_entries_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogEntriesResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListLogEntriesResponse( + next_page_token="next_page_token_value", + ) + ) await client.list_log_entries(request=None) # Establish that the underlying stub method was called. @@ -3309,12 +3794,14 @@ async def test_list_monitored_resource_descriptors_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: + type(client.transport.list_monitored_resource_descriptors), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListMonitoredResourceDescriptorsResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListMonitoredResourceDescriptorsResponse( + next_page_token="next_page_token_value", + ) + ) await client.list_monitored_resource_descriptors(request=None) # Establish that the underlying stub method was called. @@ -3335,14 +3822,14 @@ async def test_list_logs_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogsResponse( - log_names=['log_names_value'], - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListLogsResponse( + log_names=["log_names_value"], + next_page_token="next_page_token_value", + ) + ) await client.list_logs(request=None) # Establish that the underlying stub method was called. 
@@ -3363,18 +3850,21 @@ def test_transport_grpc_default(): transports.LoggingServiceV2GrpcTransport, ) + def test_logging_service_v2_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.LoggingServiceV2Transport( credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" + credentials_file="credentials.json", ) def test_logging_service_v2_base_transport(): # Instantiate the base transport. - with mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport.__init__') as Transport: + with mock.patch( + "google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport.__init__" + ) as Transport: Transport.return_value = None transport = transports.LoggingServiceV2Transport( credentials=ga_credentials.AnonymousCredentials(), @@ -3383,15 +3873,15 @@ def test_logging_service_v2_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. 
methods = ( - 'delete_log', - 'write_log_entries', - 'list_log_entries', - 'list_monitored_resource_descriptors', - 'list_logs', - 'tail_log_entries', - 'get_operation', - 'cancel_operation', - 'list_operations', + "delete_log", + "write_log_entries", + "list_log_entries", + "list_monitored_resource_descriptors", + "list_logs", + "tail_log_entries", + "get_operation", + "cancel_operation", + "list_operations", ) for method in methods: with pytest.raises(NotImplementedError): @@ -3402,7 +3892,7 @@ def test_logging_service_v2_base_transport(): # Catch all for all remaining methods and properties remainder = [ - 'kind', + "kind", ] for r in remainder: with pytest.raises(NotImplementedError): @@ -3411,29 +3901,42 @@ def test_logging_service_v2_base_transport(): def test_logging_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages') as Transport: + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch( + "google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages" + ) as Transport, + ): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.LoggingServiceV2Transport( credentials_file="credentials.json", quota_project_id="octopus", ) - load_creds.assert_called_once_with("credentials.json", + load_creds.assert_called_once_with( + "credentials.json", scopes=None, default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 
'https://www.googleapis.com/auth/logging.write', -), + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), quota_project_id="octopus", ) def test_logging_service_v2_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages') as Transport: + with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch( + "google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages" + ) as Transport, + ): Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.LoggingServiceV2Transport() @@ -3442,18 +3945,18 @@ def test_logging_service_v2_base_transport_with_adc(): def test_logging_service_v2_auth_adc(): # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) LoggingServiceV2Client() adc.assert_called_once_with( scopes=None, default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', -), + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), quota_project_id=None, ) @@ -3468,12 +3971,18 @@ def test_logging_service_v2_auth_adc(): def test_logging_service_v2_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read', 'https://www.googleapis.com/auth/logging.write',), + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), quota_project_id="octopus", ) @@ -3486,39 +3995,39 @@ def test_logging_service_v2_transport_auth_adc(transport_class): ], ) def test_logging_service_v2_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) adc.return_value = (gdch_mock, None) transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) + gdch_mock.with_gdch_audience.assert_called_once_with(e) 
@pytest.mark.parametrize( "transport_class,grpc_helpers", [ (transports.LoggingServiceV2GrpcTransport, grpc_helpers), - (transports.LoggingServiceV2GrpcAsyncIOTransport, grpc_helpers_async) + (transports.LoggingServiceV2GrpcAsyncIOTransport, grpc_helpers_async), ], ) def test_logging_service_v2_transport_create_channel(transport_class, grpc_helpers): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: + with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel, + ): creds = ga_credentials.AnonymousCredentials() adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) create_channel.assert_called_with( "logging.googleapis.com:443", @@ -3526,12 +4035,12 @@ def test_logging_service_v2_transport_create_channel(transport_class, grpc_helpe credentials_file=None, quota_project_id="octopus", default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', -), + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), scopes=["1", "2"], default_host="logging.googleapis.com", ssl_credentials=None, @@ -3542,10 +4051,14 @@ def test_logging_service_v2_transport_create_channel(transport_class, grpc_helpe ) 
-@pytest.mark.parametrize("transport_class", [transports.LoggingServiceV2GrpcTransport, transports.LoggingServiceV2GrpcAsyncIOTransport]) -def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls( - transport_class -): +@pytest.mark.parametrize( + "transport_class", + [ + transports.LoggingServiceV2GrpcTransport, + transports.LoggingServiceV2GrpcAsyncIOTransport, + ], +) +def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls(transport_class): cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. @@ -3554,7 +4067,7 @@ def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls( transport_class( host="squid.clam.whelk", credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds + ssl_channel_credentials=mock_ssl_channel_creds, ) mock_create_channel.assert_called_once_with( "squid.clam.whelk:443", @@ -3575,45 +4088,52 @@ def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls( with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: transport_class( credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback + client_cert_source_for_mtls=client_cert_source_callback, ) expected_cert, expected_key = client_cert_source_callback() mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key + certificate_chain=expected_cert, private_key=expected_key ) -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) def test_logging_service_v2_host_no_port(transport_name): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'logging.googleapis.com:443' + client_options=client_options.ClientOptions( + 
api_endpoint="logging.googleapis.com" + ), + transport=transport_name, ) + assert client.transport._host == ("logging.googleapis.com:443") + -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) def test_logging_service_v2_host_with_port(transport_name): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com:8000'), + client_options=client_options.ClientOptions( + api_endpoint="logging.googleapis.com:8000" + ), transport=transport_name, ) - assert client.transport._host == ( - 'logging.googleapis.com:8000' - ) + assert client.transport._host == ("logging.googleapis.com:8000") + def test_logging_service_v2_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.LoggingServiceV2GrpcTransport( @@ -3626,7 +4146,7 @@ def test_logging_service_v2_grpc_transport_channel(): def test_logging_service_v2_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.LoggingServiceV2GrpcAsyncIOTransport( @@ -3641,12 +4161,22 @@ def test_logging_service_v2_grpc_asyncio_transport_channel(): # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.filterwarnings("ignore::FutureWarning") -@pytest.mark.parametrize("transport_class", [transports.LoggingServiceV2GrpcTransport, transports.LoggingServiceV2GrpcAsyncIOTransport]) +@pytest.mark.parametrize( + "transport_class", + [ + transports.LoggingServiceV2GrpcTransport, + transports.LoggingServiceV2GrpcAsyncIOTransport, + ], +) def test_logging_service_v2_transport_channel_mtls_with_client_cert_source( - transport_class + transport_class, ): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -3655,7 +4185,7 @@ def test_logging_service_v2_transport_channel_mtls_with_client_cert_source( cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -3685,17 +4215,23 @@ def test_logging_service_v2_transport_channel_mtls_with_client_cert_source( # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.LoggingServiceV2GrpcTransport, transports.LoggingServiceV2GrpcAsyncIOTransport]) -def test_logging_service_v2_transport_channel_mtls_with_adc( - transport_class -): +@pytest.mark.parametrize( + "transport_class", + [ + transports.LoggingServiceV2GrpcTransport, + transports.LoggingServiceV2GrpcAsyncIOTransport, + ], +) +def test_logging_service_v2_transport_channel_mtls_with_adc(transport_class): mock_ssl_cred = mock.Mock() with mock.patch.multiple( "google.auth.transport.grpc.SslCredentials", __init__=mock.Mock(return_value=None), ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel mock_cred = mock.Mock() @@ -3726,7 +4262,10 @@ def test_logging_service_v2_transport_channel_mtls_with_adc( def test_log_path(): project = "squid" log = "clam" - expected = "projects/{project}/logs/{log}".format(project=project, log=log, ) + expected = "projects/{project}/logs/{log}".format( + project=project, + log=log, + ) actual = LoggingServiceV2Client.log_path(project, log) assert expected == actual @@ -3742,9 +4281,12 @@ def test_parse_log_path(): actual = LoggingServiceV2Client.parse_log_path(path) assert expected == actual + def test_common_billing_account_path(): billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) actual = LoggingServiceV2Client.common_billing_account_path(billing_account) assert expected == actual @@ -3759,9 +4301,12 @@ def test_parse_common_billing_account_path(): actual = LoggingServiceV2Client.parse_common_billing_account_path(path) assert expected == actual + def 
test_common_folder_path(): folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) + expected = "folders/{folder}".format( + folder=folder, + ) actual = LoggingServiceV2Client.common_folder_path(folder) assert expected == actual @@ -3776,9 +4321,12 @@ def test_parse_common_folder_path(): actual = LoggingServiceV2Client.parse_common_folder_path(path) assert expected == actual + def test_common_organization_path(): organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = LoggingServiceV2Client.common_organization_path(organization) assert expected == actual @@ -3793,9 +4341,12 @@ def test_parse_common_organization_path(): actual = LoggingServiceV2Client.parse_common_organization_path(path) assert expected == actual + def test_common_project_path(): project = "scallop" - expected = "projects/{project}".format(project=project, ) + expected = "projects/{project}".format( + project=project, + ) actual = LoggingServiceV2Client.common_project_path(project) assert expected == actual @@ -3810,10 +4361,14 @@ def test_parse_common_project_path(): actual = LoggingServiceV2Client.parse_common_project_path(path) assert expected == actual + def test_common_location_path(): project = "squid" location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) actual = LoggingServiceV2Client.common_location_path(project, location) assert expected == actual @@ -3833,14 +4388,18 @@ def test_parse_common_location_path(): def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() - with mock.patch.object(transports.LoggingServiceV2Transport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.LoggingServiceV2Transport, 
"_prep_wrapped_messages" + ) as prep: client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) - with mock.patch.object(transports.LoggingServiceV2Transport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.LoggingServiceV2Transport, "_prep_wrapped_messages" + ) as prep: transport_class = LoggingServiceV2Client.get_transport_class() transport = transport_class( credentials=ga_credentials.AnonymousCredentials(), @@ -3851,7 +4410,8 @@ def test_client_with_default_client_info(): def test_cancel_operation(transport: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3871,10 +4431,12 @@ def test_cancel_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. assert response is None + @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = LoggingServiceV2AsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3884,9 +4446,7 @@ async def test_cancel_operation_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -3909,7 +4469,7 @@ def test_cancel_operation_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None + call.return_value = None client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. @@ -3919,7 +4479,11 @@ def test_cancel_operation_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): @@ -3934,9 +4498,7 @@ async def test_cancel_operation_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3945,7 +4507,10 @@ async def test_cancel_operation_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_cancel_operation_from_dict(): @@ -3964,6 +4529,7 @@ def test_cancel_operation_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = LoggingServiceV2AsyncClient( @@ -3972,9 +4538,7 @@ async def test_cancel_operation_from_dict_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.cancel_operation( request={ "name": "locations", @@ -3998,6 +4562,7 @@ def test_cancel_operation_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.CancelOperationRequest() + @pytest.mark.asyncio async def test_cancel_operation_flattened_async(): client = LoggingServiceV2AsyncClient( @@ -4006,9 +4571,7 @@ async def test_cancel_operation_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.cancel_operation() # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4018,7 +4581,8 @@ async def test_cancel_operation_flattened_async(): def test_get_operation(transport: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4038,10 +4602,12 @@ def test_get_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. 
assert isinstance(response, operations_pb2.Operation) + @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = LoggingServiceV2AsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4086,7 +4652,11 @@ def test_get_operation_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_get_operation_field_headers_async(): @@ -4112,7 +4682,10 @@ async def test_get_operation_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_get_operation_from_dict(): @@ -4131,6 +4704,7 @@ def test_get_operation_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = LoggingServiceV2AsyncClient( @@ -4165,6 +4739,7 @@ def test_get_operation_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.GetOperationRequest() + @pytest.mark.asyncio async def test_get_operation_flattened_async(): client = LoggingServiceV2AsyncClient( @@ -4185,7 +4760,8 @@ async def test_get_operation_flattened_async(): def test_list_operations(transport: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4205,10 +4781,12 @@ def test_list_operations(transport: str = "grpc"): # Establish 
that the response is the type that we expect. assert isinstance(response, operations_pb2.ListOperationsResponse) + @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = LoggingServiceV2AsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4253,7 +4831,11 @@ def test_list_operations_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_list_operations_field_headers_async(): @@ -4279,7 +4861,10 @@ async def test_list_operations_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_list_operations_from_dict(): @@ -4298,6 +4883,7 @@ def test_list_operations_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = LoggingServiceV2AsyncClient( @@ -4332,6 +4918,7 @@ def test_list_operations_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.ListOperationsRequest() + @pytest.mark.asyncio async def test_list_operations_flattened_async(): client = LoggingServiceV2AsyncClient( @@ -4352,10 +4939,11 @@ async def test_list_operations_flattened_async(): def test_transport_close_grpc(): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with 
mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: with client: close.assert_not_called() close.assert_called_once() @@ -4364,10 +4952,11 @@ def test_transport_close_grpc(): @pytest.mark.asyncio async def test_transport_close_grpc_asyncio(): client = LoggingServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" + credentials=async_anonymous_credentials(), transport="grpc_asyncio" ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: async with client: close.assert_not_called() close.assert_called_once() @@ -4375,12 +4964,11 @@ async def test_transport_close_grpc_asyncio(): def test_client_ctx(): transports = [ - 'grpc', + "grpc", ] for transport in transports: client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. 
with mock.patch.object(type(client.transport), "close") as close: @@ -4389,10 +4977,14 @@ def test_client_ctx(): pass close.assert_called() -@pytest.mark.parametrize("client_class,transport_class", [ - (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport), - (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport), -]) + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport), + (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport), + ], +) def test_api_key_credentials(client_class, transport_class): with mock.patch.object( google.auth._default, "get_api_key_credentials", create=True @@ -4407,7 +4999,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 724afc7a5d59..aaff942ef269 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -14,6 +14,7 @@ # limitations under the License. 
# import os + # try/except added for compatibility with python < 3.8 try: from unittest import mock @@ -21,38 +22,24 @@ except ImportError: # pragma: NO COVER import mock -import grpc -from grpc.experimental import aio import json import math +from collections.abc import Mapping, Sequence + +import grpc import pytest -from collections.abc import Sequence, Mapping from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule +from grpc.experimental import aio from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule try: from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER +except ImportError: # pragma: NO COVER HAS_GOOGLE_AUTH_AIO = False -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.logging_v2.services.metrics_service_v2 import MetricsServiceV2AsyncClient -from google.cloud.logging_v2.services.metrics_service_v2 import MetricsServiceV2Client -from google.cloud.logging_v2.services.metrics_service_v2 import pagers -from google.cloud.logging_v2.services.metrics_service_v2 import transports -from google.cloud.logging_v2.types import logging_metrics -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account import google.api.distribution_pb2 as distribution_pb2 # type: ignore import google.api.label_pb2 as label_pb2 # type: ignore import google.api.launch_stage_pb2 as launch_stage_pb2 # type: ignore @@ -60,8 +47,26 @@ import google.auth import 
google.protobuf.duration_pb2 as duration_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore - - +from google.api_core import ( + client_options, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + path_template, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.logging_v2.services.metrics_service_v2 import ( + MetricsServiceV2AsyncClient, + MetricsServiceV2Client, + pagers, + transports, +) +from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account CRED_INFO_JSON = { "credential_source": "/path/to/file", @@ -76,9 +81,11 @@ async def mock_async_gen(data, chunk_size=1): chunk = data[i : i + chunk_size] yield chunk.encode("utf-8") + def client_cert_source_callback(): return b"cert bytes", b"key bytes" + # TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. # See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. def async_anonymous_credentials(): @@ -86,17 +93,27 @@ def async_anonymous_credentials(): return ga_credentials_async.AnonymousCredentials() return ga_credentials.AnonymousCredentials() + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + # If default endpoint template is localhost, then default mtls endpoint will be the same. 
# This method modifies the default endpoint template so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) def test__get_default_mtls_endpoint(): @@ -108,21 +125,48 @@ def test__get_default_mtls_endpoint(): custom_endpoint = ".custom" assert MetricsServiceV2Client._get_default_mtls_endpoint(None) is None - assert MetricsServiceV2Client._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert MetricsServiceV2Client._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert MetricsServiceV2Client._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert MetricsServiceV2Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert MetricsServiceV2Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi - assert MetricsServiceV2Client._get_default_mtls_endpoint(custom_endpoint) == custom_endpoint + assert ( + MetricsServiceV2Client._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + MetricsServiceV2Client._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + MetricsServiceV2Client._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + MetricsServiceV2Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + MetricsServiceV2Client._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + assert ( + MetricsServiceV2Client._get_default_mtls_endpoint(custom_endpoint) + == custom_endpoint + ) + def test__read_environment_variables(): assert MetricsServiceV2Client._read_environment_variables() == (False, 
"auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert MetricsServiceV2Client._read_environment_variables() == (True, "auto", None) + assert MetricsServiceV2Client._read_environment_variables() == ( + True, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert MetricsServiceV2Client._read_environment_variables() == (False, "auto", None) + assert MetricsServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} @@ -136,27 +180,46 @@ def test__read_environment_variables(): ) else: assert MetricsServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert MetricsServiceV2Client._read_environment_variables() == ( False, - "auto", + "never", None, ) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert MetricsServiceV2Client._read_environment_variables() == (False, "never", None) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert MetricsServiceV2Client._read_environment_variables() == (False, "always", None) + assert MetricsServiceV2Client._read_environment_variables() == ( + False, + "always", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert MetricsServiceV2Client._read_environment_variables() == (False, "auto", None) + assert MetricsServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: MetricsServiceV2Client._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert 
( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert MetricsServiceV2Client._read_environment_variables() == (False, "auto", "foo.com") + assert MetricsServiceV2Client._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) def test_use_client_cert_effective(): @@ -165,7 +228,9 @@ def test_use_client_cert_effective(): # the google-auth library supports automatic mTLS and determines that a # client certificate should be used. if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=True): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=True + ): assert MetricsServiceV2Client._use_client_cert_effective() is True # Test case 2: Test when `should_use_client_cert` returns False. @@ -173,7 +238,9 @@ def test_use_client_cert_effective(): # the google-auth library supports automatic mTLS and determines that a # client certificate should NOT be used. if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=False): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=False + ): assert MetricsServiceV2Client._use_client_cert_effective() is False # Test case 3: Test when `should_use_client_cert` is unavailable and the @@ -185,7 +252,9 @@ def test_use_client_cert_effective(): # Test case 4: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". 
if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"} + ): assert MetricsServiceV2Client._use_client_cert_effective() is False # Test case 5: Test when `should_use_client_cert` is unavailable and the @@ -197,7 +266,9 @@ def test_use_client_cert_effective(): # Test case 6: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"} + ): assert MetricsServiceV2Client._use_client_cert_effective() is False # Test case 7: Test when `should_use_client_cert` is unavailable and the @@ -209,7 +280,9 @@ def test_use_client_cert_effective(): # Test case 8: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"} + ): assert MetricsServiceV2Client._use_client_cert_effective() is False # Test case 9: Test when `should_use_client_cert` is unavailable and the @@ -224,83 +297,167 @@ def test_use_client_cert_effective(): # The method should raise a ValueError as the environment variable must be either # "true" or "false". 
if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): with pytest.raises(ValueError): MetricsServiceV2Client._use_client_cert_effective() # Test case 11: Test when `should_use_client_cert` is available and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. # The method should return False as the environment variable is set to an invalid value. - if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): assert MetricsServiceV2Client._use_client_cert_effective() is False # Test case 12: Test when `should_use_client_cert` is available and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also, # the GOOGLE_API_CONFIG environment variable is unset. 
- if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): assert MetricsServiceV2Client._use_client_cert_effective() is False + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() assert MetricsServiceV2Client._get_client_cert_source(None, False) is None - assert MetricsServiceV2Client._get_client_cert_source(mock_provided_cert_source, False) is None - assert MetricsServiceV2Client._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + assert ( + MetricsServiceV2Client._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + MetricsServiceV2Client._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + MetricsServiceV2Client._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + MetricsServiceV2Client._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert MetricsServiceV2Client._get_client_cert_source(None, True) is mock_default_cert_source - assert MetricsServiceV2Client._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source -@mock.patch.object(MetricsServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", 
modify_default_endpoint_template(MetricsServiceV2Client)) -@mock.patch.object(MetricsServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetricsServiceV2AsyncClient)) +@mock.patch.object( + MetricsServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MetricsServiceV2Client), +) +@mock.patch.object( + MetricsServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MetricsServiceV2AsyncClient), +) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() default_universe = MetricsServiceV2Client._DEFAULT_UNIVERSE - default_endpoint = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) - assert MetricsServiceV2Client._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert MetricsServiceV2Client._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT - assert MetricsServiceV2Client._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert MetricsServiceV2Client._get_api_endpoint(None, None, default_universe, "always") == MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT - assert MetricsServiceV2Client._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT - assert MetricsServiceV2Client._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert MetricsServiceV2Client._get_api_endpoint(None, None, default_universe, 
"never") == default_endpoint + assert ( + MetricsServiceV2Client._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + MetricsServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MetricsServiceV2Client._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + MetricsServiceV2Client._get_api_endpoint(None, None, default_universe, "always") + == MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MetricsServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MetricsServiceV2Client._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + MetricsServiceV2Client._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) with pytest.raises(MutualTLSChannelError) as excinfo: - MetricsServiceV2Client._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + MetricsServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) def test__get_universe_domain(): client_universe_domain = "foo.com" universe_domain_env = "bar.com" - assert MetricsServiceV2Client._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert MetricsServiceV2Client._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert MetricsServiceV2Client._get_universe_domain(None, None) == MetricsServiceV2Client._DEFAULT_UNIVERSE + assert ( + MetricsServiceV2Client._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + MetricsServiceV2Client._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + MetricsServiceV2Client._get_universe_domain(None, None) + == MetricsServiceV2Client._DEFAULT_UNIVERSE + ) with pytest.raises(ValueError) as excinfo: MetricsServiceV2Client._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): cred = mock.Mock(["get_cred_info"]) cred.get_cred_info = mock.Mock(return_value=cred_info_json) @@ -316,7 +473,8 @@ def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_in else: assert error.details == ["foo"] -@pytest.mark.parametrize("error_code", [401,403,404,500]) + +@pytest.mark.parametrize("error_code", [401, 
403, 404, 500]) def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): cred = mock.Mock([]) assert not hasattr(cred, "get_cred_info") @@ -329,59 +487,83 @@ def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): client._add_cred_info_for_auth_errors(error) assert error.details == [] -@pytest.mark.parametrize("client_class,transport_name", [ - (MetricsServiceV2Client, "grpc"), - (MetricsServiceV2AsyncClient, "grpc_asyncio"), -]) -def test_metrics_service_v2_client_from_service_account_info(client_class, transport_name): + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MetricsServiceV2Client, "grpc"), + (MetricsServiceV2AsyncClient, "grpc_asyncio"), + ], +) +def test_metrics_service_v2_client_from_service_account_info( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: factory.return_value = creds info = {"valid": True} client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ( - 'logging.googleapis.com:443' - ) + assert client.transport._host == ("logging.googleapis.com:443") -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.MetricsServiceV2GrpcTransport, "grpc"), - (transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_metrics_service_v2_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.MetricsServiceV2GrpcTransport, "grpc"), + (transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), + 
], +) +def test_metrics_service_v2_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=False) use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class,transport_name", [ - (MetricsServiceV2Client, "grpc"), - (MetricsServiceV2AsyncClient, "grpc_asyncio"), -]) -def test_metrics_service_v2_client_from_service_account_file(client_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MetricsServiceV2Client, "grpc"), + (MetricsServiceV2AsyncClient, "grpc_asyncio"), + ], +) +def test_metrics_service_v2_client_from_service_account_file( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_json( + 
"dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ( - 'logging.googleapis.com:443' - ) + assert client.transport._host == ("logging.googleapis.com:443") def test_metrics_service_v2_client_get_transport_class(): @@ -395,29 +577,44 @@ def test_metrics_service_v2_client_get_transport_class(): assert transport == transports.MetricsServiceV2GrpcTransport -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), - (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(MetricsServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetricsServiceV2Client)) -@mock.patch.object(MetricsServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetricsServiceV2AsyncClient)) -def test_metrics_service_v2_client_client_options(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), + ( + MetricsServiceV2AsyncClient, + transports.MetricsServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + MetricsServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MetricsServiceV2Client), +) +@mock.patch.object( + MetricsServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MetricsServiceV2AsyncClient), +) +def test_metrics_service_v2_client_client_options( + client_class, transport_class, transport_name +): # Check that if channel is provided we won't create a new one. 
- with mock.patch.object(MetricsServiceV2Client, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) + with mock.patch.object(MetricsServiceV2Client, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. - with mock.patch.object(MetricsServiceV2Client, 'get_transport_class') as gtc: + with mock.patch.object(MetricsServiceV2Client, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() # Check the case api_endpoint is provided. options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( @@ -435,13 +632,15 @@ def test_metrics_service_v2_client_client_options(client_class, transport_class, # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -453,7 +652,7 @@ def test_metrics_service_v2_client_client_options(client_class, transport_class, # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( @@ -473,17 +672,22 @@ def test_metrics_service_v2_client_client_options(client_class, transport_class, with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, 
transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -492,46 +696,90 @@ def test_metrics_service_v2_client_client_options(client_class, transport_class, api_audience=None, ) # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, - api_audience="https://language.googleapis.com" + api_audience="https://language.googleapis.com", ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc", "true"), - (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "true"), - (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc", "false"), - (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(MetricsServiceV2Client, 
"_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetricsServiceV2Client)) -@mock.patch.object(MetricsServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetricsServiceV2AsyncClient)) + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + MetricsServiceV2Client, + transports.MetricsServiceV2GrpcTransport, + "grpc", + "true", + ), + ( + MetricsServiceV2AsyncClient, + transports.MetricsServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + MetricsServiceV2Client, + transports.MetricsServiceV2GrpcTransport, + "grpc", + "false", + ), + ( + MetricsServiceV2AsyncClient, + transports.MetricsServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + MetricsServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MetricsServiceV2Client), +) +@mock.patch.object( + MetricsServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MetricsServiceV2AsyncClient), +) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_metrics_service_v2_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): +def test_metrics_service_v2_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. # Check the case client_cert_source is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -550,12 +798,22 @@ def test_metrics_service_v2_client_mtls_env_auto(client_class, transport_class, # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -576,15 +834,22 @@ def test_metrics_service_v2_client_mtls_env_auto(client_class, transport_class, ) # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -594,19 +859,31 @@ def test_metrics_service_v2_client_mtls_env_auto(client_class, transport_class, ) -@pytest.mark.parametrize("client_class", [ - MetricsServiceV2Client, MetricsServiceV2AsyncClient -]) -@mock.patch.object(MetricsServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2Client)) -@mock.patch.object(MetricsServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2AsyncClient)) +@pytest.mark.parametrize( + "client_class", [MetricsServiceV2Client, MetricsServiceV2AsyncClient] +) +@mock.patch.object( + MetricsServiceV2Client, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MetricsServiceV2Client), +) +@mock.patch.object( + MetricsServiceV2AsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MetricsServiceV2AsyncClient), +) def test_metrics_service_v2_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source == mock_client_cert_source @@ -614,18 +891,25 @@ def test_metrics_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source is None # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): if hasattr(google.auth.transport.mtls, "should_use_client_cert"): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, ) api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( options @@ -662,23 +946,23 @@ def test_metrics_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas env = os.environ.copy() env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) with mock.patch.dict(os.environ, env, clear=True): - config_filename = "mock_certificate_config.json" - config_file_content = json.dumps(config_data) - m = mock.mock_open(read_data=config_file_content) - with mock.patch("builtins.open", m): - with mock.patch.dict( - os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} - ): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, - api_endpoint=mock_api_endpoint, - ) - api_endpoint, cert_source = ( - client_class.get_mtls_endpoint_and_cert_source(options) - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is expected_cert_source + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert 
api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). test_cases = [ @@ -709,23 +993,23 @@ def test_metrics_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas env = os.environ.copy() env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") with mock.patch.dict(os.environ, env, clear=True): - config_filename = "mock_certificate_config.json" - config_file_content = json.dumps(config_data) - m = mock.mock_open(read_data=config_file_content) - with mock.patch("builtins.open", m): - with mock.patch.dict( - os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} - ): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, - api_endpoint=mock_api_endpoint, - ) - api_endpoint, cert_source = ( - client_class.get_mtls_endpoint_and_cert_source(options) - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is expected_cert_source + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): @@ -741,16 +1025,27 @@ def test_metrics_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_ENDPOINT assert cert_source is None # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source() + ) assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source @@ -760,27 +1055,50 @@ def test_metrics_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas with pytest.raises(MutualTLSChannelError) as excinfo: client_class.get_mtls_endpoint_and_cert_source() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) -@pytest.mark.parametrize("client_class", [ - MetricsServiceV2Client, MetricsServiceV2AsyncClient -]) -@mock.patch.object(MetricsServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", 
modify_default_endpoint_template(MetricsServiceV2Client)) -@mock.patch.object(MetricsServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetricsServiceV2AsyncClient)) + +@pytest.mark.parametrize( + "client_class", [MetricsServiceV2Client, MetricsServiceV2AsyncClient] +) +@mock.patch.object( + MetricsServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MetricsServiceV2Client), +) +@mock.patch.object( + MetricsServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MetricsServiceV2AsyncClient), +) def test_metrics_service_v2_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" default_universe = MetricsServiceV2Client._DEFAULT_UNIVERSE - default_endpoint = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", @@ -803,11 +1121,19 @@ def test_metrics_service_v2_client_client_api_endpoint(client_class): universe_exists = hasattr(options, "universe_domain") if universe_exists: options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the 
_DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. @@ -815,26 +1141,39 @@ def test_metrics_service_v2_client_client_api_endpoint(client_class): if hasattr(options, "universe_domain"): delattr(options, "universe_domain") with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) assert client.api_endpoint == default_endpoint -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), - (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_metrics_service_v2_client_client_options_scopes(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), + ( + MetricsServiceV2AsyncClient, + transports.MetricsServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_metrics_service_v2_client_client_options_scopes( + client_class, transport_class, transport_name +): # Check the case scopes are provided. 
options = client_options.ClientOptions( scopes=["1", "2"], ) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -843,23 +1182,39 @@ def test_metrics_service_v2_client_client_options_scopes(client_class, transport api_audience=None, ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc", grpc_helpers), - (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_metrics_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + MetricsServiceV2Client, + transports.MetricsServiceV2GrpcTransport, + "grpc", + grpc_helpers, + ), + ( + MetricsServiceV2AsyncClient, + transports.MetricsServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_metrics_service_v2_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -868,11 +1223,14 @@ def test_metrics_service_v2_client_client_options_credentials_file(client_class, api_audience=None, ) + def test_metrics_service_v2_client_client_options_from_dict(): - with mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2GrpcTransport.__init__') as grpc_transport: + with mock.patch( + "google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2GrpcTransport.__init__" + ) as grpc_transport: grpc_transport.return_value = None client = MetricsServiceV2Client( - client_options={'api_endpoint': 'squid.clam.whelk'} + client_options={"api_endpoint": "squid.clam.whelk"} ) grpc_transport.assert_called_once_with( credentials=None, @@ -887,23 +1245,38 @@ def test_metrics_service_v2_client_client_options_from_dict(): ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc", grpc_helpers), - (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_metrics_service_v2_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): 
+@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + MetricsServiceV2Client, + transports.MetricsServiceV2GrpcTransport, + "grpc", + grpc_helpers, + ), + ( + MetricsServiceV2AsyncClient, + transports.MetricsServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_metrics_service_v2_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -913,13 +1286,13 @@ def test_metrics_service_v2_client_create_channel_credentials_file(client_class, ) # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch.object(grpc_helpers, "create_channel") as create_channel, + ): creds = ga_credentials.AnonymousCredentials() file_creds = ga_credentials.AnonymousCredentials() load_creds.return_value = (file_creds, None) @@ -931,12 +1304,12 @@ def test_metrics_service_v2_client_create_channel_credentials_file(client_class, credentials_file=None, quota_project_id=None, default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', -), + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), scopes=None, default_host="logging.googleapis.com", ssl_credentials=None, @@ -947,11 +1320,14 @@ def test_metrics_service_v2_client_create_channel_credentials_file(client_class, ) -@pytest.mark.parametrize("request_type", [ - logging_metrics.ListLogMetricsRequest, - dict, -]) -def test_list_log_metrics(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_metrics.ListLogMetricsRequest, + dict, + ], +) +def test_list_log_metrics(request_type, transport: str = "grpc"): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -962,12 +1338,10 @@ def 
test_list_log_metrics(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.ListLogMetricsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.list_log_metrics(request) @@ -979,7 +1353,7 @@ def test_list_log_metrics(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogMetricsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_list_log_metrics_non_empty_request_with_auto_populated_field(): @@ -987,30 +1361,31 @@ def test_list_log_metrics_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_metrics.ListLogMetricsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_log_metrics(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.ListLogMetricsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) + def test_list_log_metrics_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1029,8 +1404,12 @@ def test_list_log_metrics_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_log_metrics] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.list_log_metrics] = ( + mock_rpc + ) request = {} client.list_log_metrics(request) @@ -1043,8 +1422,11 @@ def test_list_log_metrics_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_log_metrics_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_log_metrics_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1058,12 +1440,17 @@ async def test_list_log_metrics_async_use_cached_wrapped_rpc(transport: str = "g wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_log_metrics in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_log_metrics + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_log_metrics] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_log_metrics + ] = mock_rpc request = {} await client.list_log_metrics(request) @@ -1077,8 +1464,11 @@ async def test_list_log_metrics_async_use_cached_wrapped_rpc(transport: str = "g assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_log_metrics_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.ListLogMetricsRequest): +async def test_list_log_metrics_async( + transport: str = "grpc_asyncio", request_type=logging_metrics.ListLogMetricsRequest +): client = MetricsServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ 
-1089,13 +1479,13 @@ async def test_list_log_metrics_async(transport: str = 'grpc_asyncio', request_t request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.ListLogMetricsResponse( + next_page_token="next_page_token_value", + ) + ) response = await client.list_log_metrics(request) # Establish that the underlying gRPC stub method was called. @@ -1106,13 +1496,14 @@ async def test_list_log_metrics_async(transport: str = 'grpc_asyncio', request_t # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogMetricsAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_list_log_metrics_async_from_dict(): await test_list_log_metrics_async(request_type=dict) + def test_list_log_metrics_field_headers(): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1122,12 +1513,10 @@ def test_list_log_metrics_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.ListLogMetricsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: call.return_value = logging_metrics.ListLogMetricsResponse() client.list_log_metrics(request) @@ -1139,9 +1528,9 @@ def test_list_log_metrics_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1154,13 +1543,13 @@ async def test_list_log_metrics_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_metrics.ListLogMetricsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse()) + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.ListLogMetricsResponse() + ) await client.list_log_metrics(request) # Establish that the underlying gRPC stub method was called. @@ -1171,9 +1560,9 @@ async def test_list_log_metrics_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_list_log_metrics_flattened(): @@ -1182,15 +1571,13 @@ def test_list_log_metrics_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.ListLogMetricsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_log_metrics( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1198,7 +1585,7 @@ def test_list_log_metrics_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -1212,9 +1599,10 @@ def test_list_log_metrics_flattened_error(): with pytest.raises(ValueError): client.list_log_metrics( logging_metrics.ListLogMetricsRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test_list_log_metrics_flattened_async(): client = MetricsServiceV2AsyncClient( @@ -1222,17 +1610,17 @@ async def test_list_log_metrics_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.ListLogMetricsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.ListLogMetricsResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.list_log_metrics( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1240,9 +1628,10 @@ async def test_list_log_metrics_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test_list_log_metrics_flattened_error_async(): client = MetricsServiceV2AsyncClient( @@ -1254,7 +1643,7 @@ async def test_list_log_metrics_flattened_error_async(): with pytest.raises(ValueError): await client.list_log_metrics( logging_metrics.ListLogMetricsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -1265,9 +1654,7 @@ def test_list_log_metrics_pager(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( logging_metrics.ListLogMetricsResponse( @@ -1276,17 +1663,17 @@ def test_list_log_metrics_pager(transport_name: str = "grpc"): logging_metrics.LogMetric(), logging_metrics.LogMetric(), ], - next_page_token='abc', + next_page_token="abc", ), logging_metrics.ListLogMetricsResponse( metrics=[], - next_page_token='def', + next_page_token="def", ), logging_metrics.ListLogMetricsResponse( metrics=[ logging_metrics.LogMetric(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_metrics.ListLogMetricsResponse( metrics=[ @@ -1301,9 +1688,7 @@ def test_list_log_metrics_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_log_metrics(request={}, retry=retry, timeout=timeout) @@ -1313,8 +1698,9 @@ def test_list_log_metrics_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, logging_metrics.LogMetric) - for i in results) + assert all(isinstance(i, logging_metrics.LogMetric) for i in results) + + def test_list_log_metrics_pages(transport_name: str = "grpc"): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1322,9 +1708,7 @@ def test_list_log_metrics_pages(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( logging_metrics.ListLogMetricsResponse( @@ -1333,17 +1717,17 @@ def test_list_log_metrics_pages(transport_name: str = "grpc"): logging_metrics.LogMetric(), logging_metrics.LogMetric(), ], - next_page_token='abc', + next_page_token="abc", ), logging_metrics.ListLogMetricsResponse( metrics=[], - next_page_token='def', + next_page_token="def", ), logging_metrics.ListLogMetricsResponse( metrics=[ logging_metrics.LogMetric(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_metrics.ListLogMetricsResponse( metrics=[ @@ -1354,9 +1738,10 @@ def test_list_log_metrics_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.list_log_metrics(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_list_log_metrics_async_pager(): client = MetricsServiceV2AsyncClient( @@ -1365,8 +1750,8 @@ async def test_list_log_metrics_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_log_metrics), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging_metrics.ListLogMetricsResponse( @@ -1375,17 +1760,17 @@ async def test_list_log_metrics_async_pager(): logging_metrics.LogMetric(), logging_metrics.LogMetric(), ], - next_page_token='abc', + next_page_token="abc", ), logging_metrics.ListLogMetricsResponse( metrics=[], - next_page_token='def', + next_page_token="def", ), logging_metrics.ListLogMetricsResponse( metrics=[ logging_metrics.LogMetric(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_metrics.ListLogMetricsResponse( metrics=[ @@ -1395,15 +1780,16 @@ async def test_list_log_metrics_async_pager(): ), RuntimeError, ) - async_pager = await client.list_log_metrics(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.list_log_metrics( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, logging_metrics.LogMetric) - for i in responses) + assert all(isinstance(i, logging_metrics.LogMetric) for i in responses) @pytest.mark.asyncio @@ -1414,8 +1800,8 @@ async def test_list_log_metrics_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_log_metrics), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging_metrics.ListLogMetricsResponse( @@ -1424,17 +1810,17 @@ async def test_list_log_metrics_async_pages(): logging_metrics.LogMetric(), logging_metrics.LogMetric(), ], - next_page_token='abc', + next_page_token="abc", ), logging_metrics.ListLogMetricsResponse( metrics=[], - next_page_token='def', + next_page_token="def", ), logging_metrics.ListLogMetricsResponse( metrics=[ logging_metrics.LogMetric(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_metrics.ListLogMetricsResponse( metrics=[ @@ -1447,18 +1833,22 @@ async def test_list_log_metrics_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.list_log_metrics(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - logging_metrics.GetLogMetricRequest, - dict, -]) -def test_get_log_metric(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + logging_metrics.GetLogMetricRequest, + dict, + ], +) +def test_get_log_metric(request_type, transport: str = "grpc"): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1469,17 +1859,15 @@ def test_get_log_metric(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_log_metric), - '__call__') as call: + with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", disabled=True, - value_extractor='value_extractor_value', + value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, ) response = client.get_log_metric(request) @@ -1492,12 +1880,12 @@ def test_get_log_metric(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.bucket_name == 'bucket_name_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True - assert response.value_extractor == 'value_extractor_value' + assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1506,28 +1894,29 @@ def test_get_log_metric_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_metrics.GetLogMetricRequest( - metric_name='metric_name_value', + metric_name="metric_name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_log_metric), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_log_metric(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.GetLogMetricRequest( - metric_name='metric_name_value', + metric_name="metric_name_value", ) + def test_get_log_metric_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1546,7 +1935,9 @@ def test_get_log_metric_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.get_log_metric] = mock_rpc request = {} client.get_log_metric(request) @@ -1560,8 +1951,11 @@ def test_get_log_metric_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_log_metric_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_get_log_metric_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1575,12 +1969,17 @@ async def test_get_log_metric_async_use_cached_wrapped_rpc(transport: str = "grp wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_log_metric in client._client._transport._wrapped_methods + assert ( + client._client._transport.get_log_metric + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_log_metric] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.get_log_metric + ] = mock_rpc request = {} await client.get_log_metric(request) @@ -1594,8 +1993,11 @@ async def test_get_log_metric_async_use_cached_wrapped_rpc(transport: str = "grp assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.GetLogMetricRequest): +async def test_get_log_metric_async( + transport: str = "grpc_asyncio", request_type=logging_metrics.GetLogMetricRequest +): client = MetricsServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -1606,19 +2008,19 @@ async def 
test_get_log_metric_async(transport: str = 'grpc_asyncio', request_typ request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_log_metric), - '__call__') as call: + with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', - disabled=True, - value_extractor='value_extractor_value', - version=logging_metrics.LogMetric.ApiVersion.V1, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric( + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", + disabled=True, + value_extractor="value_extractor_value", + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + ) response = await client.get_log_metric(request) # Establish that the underlying gRPC stub method was called. @@ -1629,12 +2031,12 @@ async def test_get_log_metric_async(transport: str = 'grpc_asyncio', request_typ # Establish that the response is the type that we expect. 
assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.bucket_name == 'bucket_name_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True - assert response.value_extractor == 'value_extractor_value' + assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1642,6 +2044,7 @@ async def test_get_log_metric_async(transport: str = 'grpc_asyncio', request_typ async def test_get_log_metric_async_from_dict(): await test_get_log_metric_async(request_type=dict) + def test_get_log_metric_field_headers(): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1651,12 +2054,10 @@ def test_get_log_metric_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.GetLogMetricRequest() - request.metric_name = 'metric_name_value' + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_log_metric), - '__call__') as call: + with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: call.return_value = logging_metrics.LogMetric() client.get_log_metric(request) @@ -1668,9 +2069,9 @@ def test_get_log_metric_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'metric_name=metric_name_value', - ) in kw['metadata'] + "x-goog-request-params", + "metric_name=metric_name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1683,13 +2084,13 @@ async def test_get_log_metric_field_headers_async(): # a field header. 
Set these to a non-empty value. request = logging_metrics.GetLogMetricRequest() - request.metric_name = 'metric_name_value' + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_log_metric), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) + with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric() + ) await client.get_log_metric(request) # Establish that the underlying gRPC stub method was called. @@ -1700,9 +2101,9 @@ async def test_get_log_metric_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'metric_name=metric_name_value', - ) in kw['metadata'] + "x-goog-request-params", + "metric_name=metric_name_value", + ) in kw["metadata"] def test_get_log_metric_flattened(): @@ -1711,15 +2112,13 @@ def test_get_log_metric_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_log_metric), - '__call__') as call: + with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.get_log_metric( - metric_name='metric_name_value', + metric_name="metric_name_value", ) # Establish that the underlying call was made with the expected @@ -1727,7 +2126,7 @@ def test_get_log_metric_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].metric_name - mock_val = 'metric_name_value' + mock_val = "metric_name_value" assert arg == mock_val @@ -1741,9 +2140,10 @@ def test_get_log_metric_flattened_error(): with pytest.raises(ValueError): client.get_log_metric( logging_metrics.GetLogMetricRequest(), - metric_name='metric_name_value', + metric_name="metric_name_value", ) + @pytest.mark.asyncio async def test_get_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( @@ -1751,17 +2151,17 @@ async def test_get_log_metric_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_log_metric), - '__call__') as call: + with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.get_log_metric( - metric_name='metric_name_value', + metric_name="metric_name_value", ) # Establish that the underlying call was made with the expected @@ -1769,9 +2169,10 @@ async def test_get_log_metric_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].metric_name - mock_val = 'metric_name_value' + mock_val = "metric_name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_get_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( @@ -1783,15 +2184,18 @@ async def test_get_log_metric_flattened_error_async(): with pytest.raises(ValueError): await client.get_log_metric( logging_metrics.GetLogMetricRequest(), - metric_name='metric_name_value', + metric_name="metric_name_value", ) -@pytest.mark.parametrize("request_type", [ - logging_metrics.CreateLogMetricRequest, - dict, -]) -def test_create_log_metric(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_metrics.CreateLogMetricRequest, + dict, + ], +) +def test_create_log_metric(request_type, transport: str = "grpc"): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1803,16 +2207,16 @@ def test_create_log_metric(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_log_metric), - '__call__') as call: + type(client.transport.create_log_metric), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", disabled=True, - value_extractor='value_extractor_value', + value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, ) response = client.create_log_metric(request) @@ -1825,12 +2229,12 @@ def test_create_log_metric(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.bucket_name == 'bucket_name_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True - assert response.value_extractor == 'value_extractor_value' + assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1839,28 +2243,31 @@ def test_create_log_metric_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_metrics.CreateLogMetricRequest( - parent='parent_value', + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.create_log_metric), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.create_log_metric), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_log_metric(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.CreateLogMetricRequest( - parent='parent_value', + parent="parent_value", ) + def test_create_log_metric_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1879,8 +2286,12 @@ def test_create_log_metric_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_log_metric] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.create_log_metric] = ( + mock_rpc + ) request = {} client.create_log_metric(request) @@ -1893,8 +2304,11 @@ def test_create_log_metric_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_log_metric_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_create_log_metric_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1908,12 +2322,17 @@ async def test_create_log_metric_async_use_cached_wrapped_rpc(transport: str = " wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.create_log_metric in client._client._transport._wrapped_methods + assert ( + client._client._transport.create_log_metric + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_log_metric] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.create_log_metric + ] = mock_rpc request = {} await client.create_log_metric(request) @@ -1927,8 +2346,11 @@ async def test_create_log_metric_async_use_cached_wrapped_rpc(transport: str = " assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.CreateLogMetricRequest): +async def test_create_log_metric_async( + transport: str = "grpc_asyncio", request_type=logging_metrics.CreateLogMetricRequest +): client = MetricsServiceV2AsyncClient( credentials=async_anonymous_credentials(), 
transport=transport, @@ -1940,18 +2362,20 @@ async def test_create_log_metric_async(transport: str = 'grpc_asyncio', request_ # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_log_metric), - '__call__') as call: + type(client.transport.create_log_metric), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', - disabled=True, - value_extractor='value_extractor_value', - version=logging_metrics.LogMetric.ApiVersion.V1, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric( + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", + disabled=True, + value_extractor="value_extractor_value", + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + ) response = await client.create_log_metric(request) # Establish that the underlying gRPC stub method was called. @@ -1962,12 +2386,12 @@ async def test_create_log_metric_async(transport: str = 'grpc_asyncio', request_ # Establish that the response is the type that we expect. 
assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.bucket_name == 'bucket_name_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True - assert response.value_extractor == 'value_extractor_value' + assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1975,6 +2399,7 @@ async def test_create_log_metric_async(transport: str = 'grpc_asyncio', request_ async def test_create_log_metric_async_from_dict(): await test_create_log_metric_async(request_type=dict) + def test_create_log_metric_field_headers(): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1984,12 +2409,12 @@ def test_create_log_metric_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.CreateLogMetricRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_log_metric), - '__call__') as call: + type(client.transport.create_log_metric), "__call__" + ) as call: call.return_value = logging_metrics.LogMetric() client.create_log_metric(request) @@ -2001,9 +2426,9 @@ def test_create_log_metric_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2016,13 +2441,15 @@ async def test_create_log_metric_field_headers_async(): # a field header. Set these to a non-empty value. 
request = logging_metrics.CreateLogMetricRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_log_metric), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) + type(client.transport.create_log_metric), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric() + ) await client.create_log_metric(request) # Establish that the underlying gRPC stub method was called. @@ -2033,9 +2460,9 @@ async def test_create_log_metric_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_create_log_metric_flattened(): @@ -2045,15 +2472,15 @@ def test_create_log_metric_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_log_metric), - '__call__') as call: + type(client.transport.create_log_metric), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.create_log_metric( - parent='parent_value', - metric=logging_metrics.LogMetric(name='name_value'), + parent="parent_value", + metric=logging_metrics.LogMetric(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -2061,10 +2488,10 @@ def test_create_log_metric_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].metric - mock_val = logging_metrics.LogMetric(name='name_value') + mock_val = logging_metrics.LogMetric(name="name_value") assert arg == mock_val @@ -2078,10 +2505,11 @@ def test_create_log_metric_flattened_error(): with pytest.raises(ValueError): client.create_log_metric( logging_metrics.CreateLogMetricRequest(), - parent='parent_value', - metric=logging_metrics.LogMetric(name='name_value'), + parent="parent_value", + metric=logging_metrics.LogMetric(name="name_value"), ) + @pytest.mark.asyncio async def test_create_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( @@ -2090,17 +2518,19 @@ async def test_create_log_metric_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_log_metric), - '__call__') as call: + type(client.transport.create_log_metric), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.create_log_metric( - parent='parent_value', - metric=logging_metrics.LogMetric(name='name_value'), + parent="parent_value", + metric=logging_metrics.LogMetric(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -2108,12 +2538,13 @@ async def test_create_log_metric_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].metric - mock_val = logging_metrics.LogMetric(name='name_value') + mock_val = logging_metrics.LogMetric(name="name_value") assert arg == mock_val + @pytest.mark.asyncio async def test_create_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( @@ -2125,16 +2556,19 @@ async def test_create_log_metric_flattened_error_async(): with pytest.raises(ValueError): await client.create_log_metric( logging_metrics.CreateLogMetricRequest(), - parent='parent_value', - metric=logging_metrics.LogMetric(name='name_value'), + parent="parent_value", + metric=logging_metrics.LogMetric(name="name_value"), ) -@pytest.mark.parametrize("request_type", [ - logging_metrics.UpdateLogMetricRequest, - dict, -]) -def test_update_log_metric(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_metrics.UpdateLogMetricRequest, + dict, + ], +) +def test_update_log_metric(request_type, transport: str = "grpc"): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2146,16 +2580,16 @@ def test_update_log_metric(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_log_metric), - '__call__') as call: + type(client.transport.update_log_metric), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", disabled=True, - value_extractor='value_extractor_value', + value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, ) response = client.update_log_metric(request) @@ -2168,12 +2602,12 @@ def test_update_log_metric(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.bucket_name == 'bucket_name_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True - assert response.value_extractor == 'value_extractor_value' + assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -2182,28 +2616,31 @@ def test_update_log_metric_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_metrics.UpdateLogMetricRequest( - metric_name='metric_name_value', + metric_name="metric_name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_log_metric), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.update_log_metric), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_log_metric(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.UpdateLogMetricRequest( - metric_name='metric_name_value', + metric_name="metric_name_value", ) + def test_update_log_metric_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2222,8 +2659,12 @@ def test_update_log_metric_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_log_metric] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.update_log_metric] = ( + mock_rpc + ) request = {} client.update_log_metric(request) @@ -2236,8 +2677,11 @@ def test_update_log_metric_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_log_metric_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_update_log_metric_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2251,12 +2695,17 @@ async def test_update_log_metric_async_use_cached_wrapped_rpc(transport: str = " wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.update_log_metric in client._client._transport._wrapped_methods + assert ( + client._client._transport.update_log_metric + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_log_metric] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.update_log_metric + ] = mock_rpc request = {} await client.update_log_metric(request) @@ -2270,8 +2719,11 @@ async def test_update_log_metric_async_use_cached_wrapped_rpc(transport: str = " assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.UpdateLogMetricRequest): +async def test_update_log_metric_async( + transport: str = "grpc_asyncio", request_type=logging_metrics.UpdateLogMetricRequest +): client = MetricsServiceV2AsyncClient( credentials=async_anonymous_credentials(), 
transport=transport, @@ -2283,18 +2735,20 @@ async def test_update_log_metric_async(transport: str = 'grpc_asyncio', request_ # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_log_metric), - '__call__') as call: + type(client.transport.update_log_metric), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', - disabled=True, - value_extractor='value_extractor_value', - version=logging_metrics.LogMetric.ApiVersion.V1, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric( + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", + disabled=True, + value_extractor="value_extractor_value", + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + ) response = await client.update_log_metric(request) # Establish that the underlying gRPC stub method was called. @@ -2305,12 +2759,12 @@ async def test_update_log_metric_async(transport: str = 'grpc_asyncio', request_ # Establish that the response is the type that we expect. 
assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.bucket_name == 'bucket_name_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True - assert response.value_extractor == 'value_extractor_value' + assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -2318,6 +2772,7 @@ async def test_update_log_metric_async(transport: str = 'grpc_asyncio', request_ async def test_update_log_metric_async_from_dict(): await test_update_log_metric_async(request_type=dict) + def test_update_log_metric_field_headers(): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2327,12 +2782,12 @@ def test_update_log_metric_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.UpdateLogMetricRequest() - request.metric_name = 'metric_name_value' + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_log_metric), - '__call__') as call: + type(client.transport.update_log_metric), "__call__" + ) as call: call.return_value = logging_metrics.LogMetric() client.update_log_metric(request) @@ -2344,9 +2799,9 @@ def test_update_log_metric_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'metric_name=metric_name_value', - ) in kw['metadata'] + "x-goog-request-params", + "metric_name=metric_name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2359,13 +2814,15 @@ async def test_update_log_metric_field_headers_async(): # a field header. 
Set these to a non-empty value. request = logging_metrics.UpdateLogMetricRequest() - request.metric_name = 'metric_name_value' + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_log_metric), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) + type(client.transport.update_log_metric), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric() + ) await client.update_log_metric(request) # Establish that the underlying gRPC stub method was called. @@ -2376,9 +2833,9 @@ async def test_update_log_metric_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'metric_name=metric_name_value', - ) in kw['metadata'] + "x-goog-request-params", + "metric_name=metric_name_value", + ) in kw["metadata"] def test_update_log_metric_flattened(): @@ -2388,15 +2845,15 @@ def test_update_log_metric_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_log_metric), - '__call__') as call: + type(client.transport.update_log_metric), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.update_log_metric( - metric_name='metric_name_value', - metric=logging_metrics.LogMetric(name='name_value'), + metric_name="metric_name_value", + metric=logging_metrics.LogMetric(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -2404,10 +2861,10 @@ def test_update_log_metric_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].metric_name - mock_val = 'metric_name_value' + mock_val = "metric_name_value" assert arg == mock_val arg = args[0].metric - mock_val = logging_metrics.LogMetric(name='name_value') + mock_val = logging_metrics.LogMetric(name="name_value") assert arg == mock_val @@ -2421,10 +2878,11 @@ def test_update_log_metric_flattened_error(): with pytest.raises(ValueError): client.update_log_metric( logging_metrics.UpdateLogMetricRequest(), - metric_name='metric_name_value', - metric=logging_metrics.LogMetric(name='name_value'), + metric_name="metric_name_value", + metric=logging_metrics.LogMetric(name="name_value"), ) + @pytest.mark.asyncio async def test_update_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( @@ -2433,17 +2891,19 @@ async def test_update_log_metric_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_log_metric), - '__call__') as call: + type(client.transport.update_log_metric), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.update_log_metric( - metric_name='metric_name_value', - metric=logging_metrics.LogMetric(name='name_value'), + metric_name="metric_name_value", + metric=logging_metrics.LogMetric(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -2451,12 +2911,13 @@ async def test_update_log_metric_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].metric_name - mock_val = 'metric_name_value' + mock_val = "metric_name_value" assert arg == mock_val arg = args[0].metric - mock_val = logging_metrics.LogMetric(name='name_value') + mock_val = logging_metrics.LogMetric(name="name_value") assert arg == mock_val + @pytest.mark.asyncio async def test_update_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( @@ -2468,16 +2929,19 @@ async def test_update_log_metric_flattened_error_async(): with pytest.raises(ValueError): await client.update_log_metric( logging_metrics.UpdateLogMetricRequest(), - metric_name='metric_name_value', - metric=logging_metrics.LogMetric(name='name_value'), + metric_name="metric_name_value", + metric=logging_metrics.LogMetric(name="name_value"), ) -@pytest.mark.parametrize("request_type", [ - logging_metrics.DeleteLogMetricRequest, - dict, -]) -def test_delete_log_metric(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_metrics.DeleteLogMetricRequest, + dict, + ], +) +def test_delete_log_metric(request_type, transport: str = "grpc"): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2489,8 +2953,8 @@ def test_delete_log_metric(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.delete_log_metric), - '__call__') as call: + type(client.transport.delete_log_metric), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = None response = client.delete_log_metric(request) @@ -2510,28 +2974,31 @@ def test_delete_log_metric_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_metrics.DeleteLogMetricRequest( - metric_name='metric_name_value', + metric_name="metric_name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_log_metric), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.delete_log_metric), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_log_metric(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.DeleteLogMetricRequest( - metric_name='metric_name_value', + metric_name="metric_name_value", ) + def test_delete_log_metric_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2550,8 +3017,12 @@ def test_delete_log_metric_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.delete_log_metric] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_log_metric] = ( + mock_rpc + ) request = {} client.delete_log_metric(request) @@ -2564,8 +3035,11 @@ def test_delete_log_metric_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_log_metric_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_delete_log_metric_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2579,12 +3053,17 @@ async def test_delete_log_metric_async_use_cached_wrapped_rpc(transport: str = " wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.delete_log_metric in client._client._transport._wrapped_methods + assert ( + client._client._transport.delete_log_metric + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_log_metric] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.delete_log_metric + ] = mock_rpc request = {} await client.delete_log_metric(request) @@ -2598,8 +3077,11 @@ async def test_delete_log_metric_async_use_cached_wrapped_rpc(transport: str = " assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.DeleteLogMetricRequest): +async def 
test_delete_log_metric_async( + transport: str = "grpc_asyncio", request_type=logging_metrics.DeleteLogMetricRequest +): client = MetricsServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2611,8 +3093,8 @@ async def test_delete_log_metric_async(transport: str = 'grpc_asyncio', request_ # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_log_metric), - '__call__') as call: + type(client.transport.delete_log_metric), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.delete_log_metric(request) @@ -2631,6 +3113,7 @@ async def test_delete_log_metric_async(transport: str = 'grpc_asyncio', request_ async def test_delete_log_metric_async_from_dict(): await test_delete_log_metric_async(request_type=dict) + def test_delete_log_metric_field_headers(): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2640,12 +3123,12 @@ def test_delete_log_metric_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.DeleteLogMetricRequest() - request.metric_name = 'metric_name_value' + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_log_metric), - '__call__') as call: + type(client.transport.delete_log_metric), "__call__" + ) as call: call.return_value = None client.delete_log_metric(request) @@ -2657,9 +3140,9 @@ def test_delete_log_metric_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'metric_name=metric_name_value', - ) in kw['metadata'] + "x-goog-request-params", + "metric_name=metric_name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2672,12 +3155,12 @@ async def test_delete_log_metric_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_metrics.DeleteLogMetricRequest() - request.metric_name = 'metric_name_value' + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_log_metric), - '__call__') as call: + type(client.transport.delete_log_metric), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_log_metric(request) @@ -2689,9 +3172,9 @@ async def test_delete_log_metric_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'metric_name=metric_name_value', - ) in kw['metadata'] + "x-goog-request-params", + "metric_name=metric_name_value", + ) in kw["metadata"] def test_delete_log_metric_flattened(): @@ -2701,14 +3184,14 @@ def test_delete_log_metric_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_log_metric), - '__call__') as call: + type(client.transport.delete_log_metric), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.delete_log_metric( - metric_name='metric_name_value', + metric_name="metric_name_value", ) # Establish that the underlying call was made with the expected @@ -2716,7 +3199,7 @@ def test_delete_log_metric_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].metric_name - mock_val = 'metric_name_value' + mock_val = "metric_name_value" assert arg == mock_val @@ -2730,9 +3213,10 @@ def test_delete_log_metric_flattened_error(): with pytest.raises(ValueError): client.delete_log_metric( logging_metrics.DeleteLogMetricRequest(), - metric_name='metric_name_value', + metric_name="metric_name_value", ) + @pytest.mark.asyncio async def test_delete_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( @@ -2741,8 +3225,8 @@ async def test_delete_log_metric_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_log_metric), - '__call__') as call: + type(client.transport.delete_log_metric), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2750,7 +3234,7 @@ async def test_delete_log_metric_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.delete_log_metric( - metric_name='metric_name_value', + metric_name="metric_name_value", ) # Establish that the underlying call was made with the expected @@ -2758,9 +3242,10 @@ async def test_delete_log_metric_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].metric_name - mock_val = 'metric_name_value' + mock_val = "metric_name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_delete_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( @@ -2772,7 +3257,7 @@ async def test_delete_log_metric_flattened_error_async(): with pytest.raises(ValueError): await client.delete_log_metric( logging_metrics.DeleteLogMetricRequest(), - metric_name='metric_name_value', + metric_name="metric_name_value", ) @@ -2814,8 +3299,7 @@ def test_credentials_transport_error(): options.api_key = "api_key" with pytest.raises(ValueError): client = MetricsServiceV2Client( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() + client_options=options, credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. @@ -2837,6 +3321,7 @@ def test_transport_instance(): client = MetricsServiceV2Client(transport=transport) assert client.transport is transport + def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.MetricsServiceV2GrpcTransport( @@ -2851,17 +3336,22 @@ def test_transport_get_channel(): channel = transport.grpc_channel assert channel -@pytest.mark.parametrize("transport_class", [ - transports.MetricsServiceV2GrpcTransport, - transports.MetricsServiceV2GrpcAsyncIOTransport, -]) + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2GrpcAsyncIOTransport, + ], +) def test_transport_adc(transport_class): # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() + def test_transport_kind_grpc(): transport = MetricsServiceV2Client.get_transport_class("grpc")( credentials=ga_credentials.AnonymousCredentials() @@ -2871,8 +3361,7 @@ def test_transport_kind_grpc(): def test_initialize_client_w_grpc(): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" ) assert client is not None @@ -2886,9 +3375,7 @@ def test_list_log_metrics_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: call.return_value = logging_metrics.ListLogMetricsResponse() client.list_log_metrics(request=None) @@ -2909,9 +3396,7 @@ def test_get_log_metric_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_log_metric), - '__call__') as call: + with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: call.return_value = logging_metrics.LogMetric() client.get_log_metric(request=None) @@ -2933,8 +3418,8 @@ def test_create_log_metric_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.create_log_metric), - '__call__') as call: + type(client.transport.create_log_metric), "__call__" + ) as call: call.return_value = logging_metrics.LogMetric() client.create_log_metric(request=None) @@ -2956,8 +3441,8 @@ def test_update_log_metric_empty_call_grpc(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.update_log_metric), - '__call__') as call: + type(client.transport.update_log_metric), "__call__" + ) as call: call.return_value = logging_metrics.LogMetric() client.update_log_metric(request=None) @@ -2979,8 +3464,8 @@ def test_delete_log_metric_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.delete_log_metric), - '__call__') as call: + type(client.transport.delete_log_metric), "__call__" + ) as call: call.return_value = None client.delete_log_metric(request=None) @@ -3001,8 +3486,7 @@ def test_transport_kind_grpc_asyncio(): def test_initialize_client_w_grpc_asyncio(): client = MetricsServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" + credentials=async_anonymous_credentials(), transport="grpc_asyncio" ) assert client is not None @@ -3017,13 +3501,13 @@ async def test_list_log_metrics_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.ListLogMetricsResponse( + next_page_token="next_page_token_value", + ) + ) await client.list_log_metrics(request=None) # Establish that the underlying stub method was called. @@ -3044,19 +3528,19 @@ async def test_get_log_metric_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.get_log_metric), - '__call__') as call: + with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', - disabled=True, - value_extractor='value_extractor_value', - version=logging_metrics.LogMetric.ApiVersion.V1, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric( + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", + disabled=True, + value_extractor="value_extractor_value", + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + ) await client.get_log_metric(request=None) # Establish that the underlying stub method was called. @@ -3078,18 +3562,20 @@ async def test_create_log_metric_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.create_log_metric), - '__call__') as call: + type(client.transport.create_log_metric), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', - disabled=True, - value_extractor='value_extractor_value', - version=logging_metrics.LogMetric.ApiVersion.V1, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric( + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", + disabled=True, + value_extractor="value_extractor_value", + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + ) await client.create_log_metric(request=None) # Establish that the underlying stub method was called. @@ -3111,18 +3597,20 @@ async def test_update_log_metric_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_log_metric), - '__call__') as call: + type(client.transport.update_log_metric), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', - disabled=True, - value_extractor='value_extractor_value', - version=logging_metrics.LogMetric.ApiVersion.V1, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric( + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", + disabled=True, + value_extractor="value_extractor_value", + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + ) await client.update_log_metric(request=None) # Establish that the underlying stub method was called. @@ -3144,8 +3632,8 @@ async def test_delete_log_metric_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.delete_log_metric), - '__call__') as call: + type(client.transport.delete_log_metric), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_log_metric(request=None) @@ -3168,18 +3656,21 @@ def test_transport_grpc_default(): transports.MetricsServiceV2GrpcTransport, ) + def test_metrics_service_v2_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.MetricsServiceV2Transport( credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" + credentials_file="credentials.json", ) def test_metrics_service_v2_base_transport(): # Instantiate the base transport. - with mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport.__init__') as Transport: + with mock.patch( + "google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport.__init__" + ) as Transport: Transport.return_value = None transport = transports.MetricsServiceV2Transport( credentials=ga_credentials.AnonymousCredentials(), @@ -3188,14 +3679,14 @@ def test_metrics_service_v2_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. 
methods = ( - 'list_log_metrics', - 'get_log_metric', - 'create_log_metric', - 'update_log_metric', - 'delete_log_metric', - 'get_operation', - 'cancel_operation', - 'list_operations', + "list_log_metrics", + "get_log_metric", + "create_log_metric", + "update_log_metric", + "delete_log_metric", + "get_operation", + "cancel_operation", + "list_operations", ) for method in methods: with pytest.raises(NotImplementedError): @@ -3206,7 +3697,7 @@ def test_metrics_service_v2_base_transport(): # Catch all for all remaining methods and properties remainder = [ - 'kind', + "kind", ] for r in remainder: with pytest.raises(NotImplementedError): @@ -3215,29 +3706,42 @@ def test_metrics_service_v2_base_transport(): def test_metrics_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages') as Transport: + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch( + "google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages" + ) as Transport, + ): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.MetricsServiceV2Transport( credentials_file="credentials.json", quota_project_id="octopus", ) - load_creds.assert_called_once_with("credentials.json", + load_creds.assert_called_once_with( + "credentials.json", scopes=None, default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', -), + 
"https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), quota_project_id="octopus", ) def test_metrics_service_v2_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages') as Transport: + with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch( + "google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages" + ) as Transport, + ): Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.MetricsServiceV2Transport() @@ -3246,18 +3750,18 @@ def test_metrics_service_v2_base_transport_with_adc(): def test_metrics_service_v2_auth_adc(): # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) MetricsServiceV2Client() adc.assert_called_once_with( scopes=None, default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', -), + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), quota_project_id=None, ) @@ -3272,12 +3776,18 @@ def test_metrics_service_v2_auth_adc(): def test_metrics_service_v2_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read', 'https://www.googleapis.com/auth/logging.write',), + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), quota_project_id="octopus", ) @@ -3290,39 +3800,39 @@ def test_metrics_service_v2_transport_auth_adc(transport_class): ], ) def test_metrics_service_v2_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) adc.return_value = (gdch_mock, None) transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) + gdch_mock.with_gdch_audience.assert_called_once_with(e) 
@pytest.mark.parametrize( "transport_class,grpc_helpers", [ (transports.MetricsServiceV2GrpcTransport, grpc_helpers), - (transports.MetricsServiceV2GrpcAsyncIOTransport, grpc_helpers_async) + (transports.MetricsServiceV2GrpcAsyncIOTransport, grpc_helpers_async), ], ) def test_metrics_service_v2_transport_create_channel(transport_class, grpc_helpers): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: + with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel, + ): creds = ga_credentials.AnonymousCredentials() adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) create_channel.assert_called_with( "logging.googleapis.com:443", @@ -3330,12 +3840,12 @@ def test_metrics_service_v2_transport_create_channel(transport_class, grpc_helpe credentials_file=None, quota_project_id="octopus", default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', -), + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), scopes=["1", "2"], default_host="logging.googleapis.com", ssl_credentials=None, @@ -3346,10 +3856,14 @@ def test_metrics_service_v2_transport_create_channel(transport_class, grpc_helpe ) 
-@pytest.mark.parametrize("transport_class", [transports.MetricsServiceV2GrpcTransport, transports.MetricsServiceV2GrpcAsyncIOTransport]) -def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls( - transport_class -): +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2GrpcAsyncIOTransport, + ], +) +def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls(transport_class): cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. @@ -3358,7 +3872,7 @@ def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls( transport_class( host="squid.clam.whelk", credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds + ssl_channel_credentials=mock_ssl_channel_creds, ) mock_create_channel.assert_called_once_with( "squid.clam.whelk:443", @@ -3379,45 +3893,52 @@ def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls( with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: transport_class( credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback + client_cert_source_for_mtls=client_cert_source_callback, ) expected_cert, expected_key = client_cert_source_callback() mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key + certificate_chain=expected_cert, private_key=expected_key ) -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) def test_metrics_service_v2_host_no_port(transport_name): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'logging.googleapis.com:443' + client_options=client_options.ClientOptions( + 
api_endpoint="logging.googleapis.com" + ), + transport=transport_name, ) + assert client.transport._host == ("logging.googleapis.com:443") + -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) def test_metrics_service_v2_host_with_port(transport_name): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com:8000'), + client_options=client_options.ClientOptions( + api_endpoint="logging.googleapis.com:8000" + ), transport=transport_name, ) - assert client.transport._host == ( - 'logging.googleapis.com:8000' - ) + assert client.transport._host == ("logging.googleapis.com:8000") + def test_metrics_service_v2_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.MetricsServiceV2GrpcTransport( @@ -3430,7 +3951,7 @@ def test_metrics_service_v2_grpc_transport_channel(): def test_metrics_service_v2_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.MetricsServiceV2GrpcAsyncIOTransport( @@ -3445,12 +3966,22 @@ def test_metrics_service_v2_grpc_asyncio_transport_channel(): # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.filterwarnings("ignore::FutureWarning") -@pytest.mark.parametrize("transport_class", [transports.MetricsServiceV2GrpcTransport, transports.MetricsServiceV2GrpcAsyncIOTransport]) +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2GrpcAsyncIOTransport, + ], +) def test_metrics_service_v2_transport_channel_mtls_with_client_cert_source( - transport_class + transport_class, ): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -3459,7 +3990,7 @@ def test_metrics_service_v2_transport_channel_mtls_with_client_cert_source( cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -3489,17 +4020,23 @@ def test_metrics_service_v2_transport_channel_mtls_with_client_cert_source( # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.MetricsServiceV2GrpcTransport, transports.MetricsServiceV2GrpcAsyncIOTransport]) -def test_metrics_service_v2_transport_channel_mtls_with_adc( - transport_class -): +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2GrpcAsyncIOTransport, + ], +) +def test_metrics_service_v2_transport_channel_mtls_with_adc(transport_class): mock_ssl_cred = mock.Mock() with mock.patch.multiple( "google.auth.transport.grpc.SslCredentials", __init__=mock.Mock(return_value=None), ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel mock_cred = mock.Mock() @@ -3530,7 +4067,10 @@ def test_metrics_service_v2_transport_channel_mtls_with_adc( def test_log_metric_path(): project = "squid" metric = "clam" - expected = "projects/{project}/metrics/{metric}".format(project=project, metric=metric, ) + expected = "projects/{project}/metrics/{metric}".format( + project=project, + metric=metric, + ) actual = MetricsServiceV2Client.log_metric_path(project, metric) assert expected == actual @@ -3546,9 +4086,12 @@ def test_parse_log_metric_path(): actual = MetricsServiceV2Client.parse_log_metric_path(path) assert expected == actual + def test_common_billing_account_path(): billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) actual = MetricsServiceV2Client.common_billing_account_path(billing_account) assert expected == actual @@ -3563,9 +4106,12 @@ def test_parse_common_billing_account_path(): actual = 
MetricsServiceV2Client.parse_common_billing_account_path(path) assert expected == actual + def test_common_folder_path(): folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) + expected = "folders/{folder}".format( + folder=folder, + ) actual = MetricsServiceV2Client.common_folder_path(folder) assert expected == actual @@ -3580,9 +4126,12 @@ def test_parse_common_folder_path(): actual = MetricsServiceV2Client.parse_common_folder_path(path) assert expected == actual + def test_common_organization_path(): organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = MetricsServiceV2Client.common_organization_path(organization) assert expected == actual @@ -3597,9 +4146,12 @@ def test_parse_common_organization_path(): actual = MetricsServiceV2Client.parse_common_organization_path(path) assert expected == actual + def test_common_project_path(): project = "scallop" - expected = "projects/{project}".format(project=project, ) + expected = "projects/{project}".format( + project=project, + ) actual = MetricsServiceV2Client.common_project_path(project) assert expected == actual @@ -3614,10 +4166,14 @@ def test_parse_common_project_path(): actual = MetricsServiceV2Client.parse_common_project_path(path) assert expected == actual + def test_common_location_path(): project = "squid" location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) actual = MetricsServiceV2Client.common_location_path(project, location) assert expected == actual @@ -3637,14 +4193,18 @@ def test_parse_common_location_path(): def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() - with mock.patch.object(transports.MetricsServiceV2Transport, 
'_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.MetricsServiceV2Transport, "_prep_wrapped_messages" + ) as prep: client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) - with mock.patch.object(transports.MetricsServiceV2Transport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.MetricsServiceV2Transport, "_prep_wrapped_messages" + ) as prep: transport_class = MetricsServiceV2Client.get_transport_class() transport = transport_class( credentials=ga_credentials.AnonymousCredentials(), @@ -3655,7 +4215,8 @@ def test_client_with_default_client_info(): def test_cancel_operation(transport: str = "grpc"): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3675,10 +4236,12 @@ def test_cancel_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. assert response is None + @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = MetricsServiceV2AsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3688,9 +4251,7 @@ async def test_cancel_operation_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3713,7 +4274,7 @@ def test_cancel_operation_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None + call.return_value = None client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. @@ -3723,7 +4284,11 @@ def test_cancel_operation_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): @@ -3738,9 +4303,7 @@ async def test_cancel_operation_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3749,7 +4312,10 @@ async def test_cancel_operation_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_cancel_operation_from_dict(): @@ -3768,6 +4334,7 @@ def test_cancel_operation_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = MetricsServiceV2AsyncClient( @@ -3776,9 +4343,7 @@ async def test_cancel_operation_from_dict_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.cancel_operation( request={ "name": "locations", @@ -3802,6 +4367,7 @@ def test_cancel_operation_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.CancelOperationRequest() + @pytest.mark.asyncio async def test_cancel_operation_flattened_async(): client = MetricsServiceV2AsyncClient( @@ -3810,9 +4376,7 @@ async def test_cancel_operation_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.cancel_operation() # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -3822,7 +4386,8 @@ async def test_cancel_operation_flattened_async(): def test_get_operation(transport: str = "grpc"): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3842,10 +4407,12 @@ def test_get_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.Operation) + @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = MetricsServiceV2AsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3890,7 +4457,11 @@ def test_get_operation_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_get_operation_field_headers_async(): @@ -3916,7 +4487,10 @@ async def test_get_operation_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_get_operation_from_dict(): @@ -3935,6 +4509,7 @@ def test_get_operation_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = MetricsServiceV2AsyncClient( @@ -3969,6 +4544,7 @@ def test_get_operation_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.GetOperationRequest() + @pytest.mark.asyncio async def test_get_operation_flattened_async(): client = MetricsServiceV2AsyncClient( @@ -3989,7 +4565,8 @@ async def test_get_operation_flattened_async(): def test_list_operations(transport: str = "grpc"): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4009,10 +4586,12 @@ def test_list_operations(transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.ListOperationsResponse) + @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = MetricsServiceV2AsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4057,7 +4636,11 @@ def test_list_operations_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_list_operations_field_headers_async(): @@ -4083,7 +4666,10 @@ async def test_list_operations_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_list_operations_from_dict(): @@ -4102,6 +4688,7 @@ def test_list_operations_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = MetricsServiceV2AsyncClient( @@ -4136,6 +4723,7 @@ def test_list_operations_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.ListOperationsRequest() + @pytest.mark.asyncio async def test_list_operations_flattened_async(): client = MetricsServiceV2AsyncClient( @@ -4156,10 +4744,11 @@ async def test_list_operations_flattened_async(): def test_transport_close_grpc(): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: with client: close.assert_not_called() close.assert_called_once() @@ -4168,10 +4757,11 @@ def test_transport_close_grpc(): @pytest.mark.asyncio async def test_transport_close_grpc_asyncio(): client = MetricsServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" + credentials=async_anonymous_credentials(), transport="grpc_asyncio" ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: 
+ with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: async with client: close.assert_not_called() close.assert_called_once() @@ -4179,12 +4769,11 @@ async def test_transport_close_grpc_asyncio(): def test_client_ctx(): transports = [ - 'grpc', + "grpc", ] for transport in transports: client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. with mock.patch.object(type(client.transport), "close") as close: @@ -4193,10 +4782,14 @@ def test_client_ctx(): pass close.assert_called() -@pytest.mark.parametrize("client_class,transport_class", [ - (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport), - (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport), -]) + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport), + (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport), + ], +) def test_api_key_credentials(client_class, transport_class): with mock.patch.object( google.auth._default, "get_api_key_credentials", create=True @@ -4211,7 +4804,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/conf.py index 182a272e3182..8c65d450e2cd 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/conf.py @@ -28,7 +28,6 @@ import os import shlex import sys -import logging from typing import Any # If extensions (or modules to document with autodoc) are in another directory, @@ -83,9 +82,9 @@ root_doc = "index" # General information about the project. -project = u"google-cloud-logging" -copyright = u"2025, Google, LLC" -author = u"Google APIs" +project = "google-cloud-logging" +copyright = "2025, Google, LLC" +author = "Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -285,7 +284,7 @@ ( root_doc, "google-cloud-logging.tex", - u"google-cloud-logging Documentation", + "google-cloud-logging Documentation", author, "manual", ) @@ -386,6 +385,7 @@ napoleon_use_param = True napoleon_use_rtype = True + # Setup for sphinx behaviors such as warning filters. class UnexpectedUnindentFilter(logging.Filter): """Filter out warnings about unexpected unindentation following bullet lists.""" @@ -413,5 +413,5 @@ def setup(app: Any) -> None: """ # Sphinx's logger is hierarchical. Adding a filter to the # root 'sphinx' logger will catch warnings from all sub-loggers. 
- logger = logging.getLogger('sphinx') + logger = logging.getLogger("sphinx") logger.addFilter(UnexpectedUnindentFilter()) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging/__init__.py index 8e644e881d12..c436ddd91386 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging/__init__.py @@ -18,170 +18,190 @@ __version__ = package_version.__version__ -from google.cloud.logging_v2.services.config_service_v2.client import BaseConfigServiceV2Client -from google.cloud.logging_v2.services.config_service_v2.async_client import BaseConfigServiceV2AsyncClient -from google.cloud.logging_v2.services.logging_service_v2.client import LoggingServiceV2Client -from google.cloud.logging_v2.services.logging_service_v2.async_client import LoggingServiceV2AsyncClient -from google.cloud.logging_v2.services.metrics_service_v2.client import BaseMetricsServiceV2Client -from google.cloud.logging_v2.services.metrics_service_v2.async_client import BaseMetricsServiceV2AsyncClient - -from google.cloud.logging_v2.types.log_entry import LogEntry -from google.cloud.logging_v2.types.log_entry import LogEntryOperation -from google.cloud.logging_v2.types.log_entry import LogEntrySourceLocation -from google.cloud.logging_v2.types.log_entry import LogSplit -from google.cloud.logging_v2.types.logging import DeleteLogRequest -from google.cloud.logging_v2.types.logging import ListLogEntriesRequest -from google.cloud.logging_v2.types.logging import ListLogEntriesResponse -from google.cloud.logging_v2.types.logging import ListLogsRequest -from google.cloud.logging_v2.types.logging import ListLogsResponse -from google.cloud.logging_v2.types.logging import ListMonitoredResourceDescriptorsRequest -from 
google.cloud.logging_v2.types.logging import ListMonitoredResourceDescriptorsResponse -from google.cloud.logging_v2.types.logging import TailLogEntriesRequest -from google.cloud.logging_v2.types.logging import TailLogEntriesResponse -from google.cloud.logging_v2.types.logging import WriteLogEntriesPartialErrors -from google.cloud.logging_v2.types.logging import WriteLogEntriesRequest -from google.cloud.logging_v2.types.logging import WriteLogEntriesResponse -from google.cloud.logging_v2.types.logging_config import BigQueryDataset -from google.cloud.logging_v2.types.logging_config import BigQueryOptions -from google.cloud.logging_v2.types.logging_config import BucketMetadata -from google.cloud.logging_v2.types.logging_config import CmekSettings -from google.cloud.logging_v2.types.logging_config import CopyLogEntriesMetadata -from google.cloud.logging_v2.types.logging_config import CopyLogEntriesRequest -from google.cloud.logging_v2.types.logging_config import CopyLogEntriesResponse -from google.cloud.logging_v2.types.logging_config import CreateBucketRequest -from google.cloud.logging_v2.types.logging_config import CreateExclusionRequest -from google.cloud.logging_v2.types.logging_config import CreateLinkRequest -from google.cloud.logging_v2.types.logging_config import CreateSinkRequest -from google.cloud.logging_v2.types.logging_config import CreateViewRequest -from google.cloud.logging_v2.types.logging_config import DeleteBucketRequest -from google.cloud.logging_v2.types.logging_config import DeleteExclusionRequest -from google.cloud.logging_v2.types.logging_config import DeleteLinkRequest -from google.cloud.logging_v2.types.logging_config import DeleteSinkRequest -from google.cloud.logging_v2.types.logging_config import DeleteViewRequest -from google.cloud.logging_v2.types.logging_config import GetBucketRequest -from google.cloud.logging_v2.types.logging_config import GetCmekSettingsRequest -from google.cloud.logging_v2.types.logging_config import 
GetExclusionRequest -from google.cloud.logging_v2.types.logging_config import GetLinkRequest -from google.cloud.logging_v2.types.logging_config import GetSettingsRequest -from google.cloud.logging_v2.types.logging_config import GetSinkRequest -from google.cloud.logging_v2.types.logging_config import GetViewRequest -from google.cloud.logging_v2.types.logging_config import IndexConfig -from google.cloud.logging_v2.types.logging_config import Link -from google.cloud.logging_v2.types.logging_config import LinkMetadata -from google.cloud.logging_v2.types.logging_config import ListBucketsRequest -from google.cloud.logging_v2.types.logging_config import ListBucketsResponse -from google.cloud.logging_v2.types.logging_config import ListExclusionsRequest -from google.cloud.logging_v2.types.logging_config import ListExclusionsResponse -from google.cloud.logging_v2.types.logging_config import ListLinksRequest -from google.cloud.logging_v2.types.logging_config import ListLinksResponse -from google.cloud.logging_v2.types.logging_config import ListSinksRequest -from google.cloud.logging_v2.types.logging_config import ListSinksResponse -from google.cloud.logging_v2.types.logging_config import ListViewsRequest -from google.cloud.logging_v2.types.logging_config import ListViewsResponse -from google.cloud.logging_v2.types.logging_config import LocationMetadata -from google.cloud.logging_v2.types.logging_config import LogBucket -from google.cloud.logging_v2.types.logging_config import LogExclusion -from google.cloud.logging_v2.types.logging_config import LogSink -from google.cloud.logging_v2.types.logging_config import LogView -from google.cloud.logging_v2.types.logging_config import Settings -from google.cloud.logging_v2.types.logging_config import UndeleteBucketRequest -from google.cloud.logging_v2.types.logging_config import UpdateBucketRequest -from google.cloud.logging_v2.types.logging_config import UpdateCmekSettingsRequest -from google.cloud.logging_v2.types.logging_config 
import UpdateExclusionRequest -from google.cloud.logging_v2.types.logging_config import UpdateSettingsRequest -from google.cloud.logging_v2.types.logging_config import UpdateSinkRequest -from google.cloud.logging_v2.types.logging_config import UpdateViewRequest -from google.cloud.logging_v2.types.logging_config import IndexType -from google.cloud.logging_v2.types.logging_config import LifecycleState -from google.cloud.logging_v2.types.logging_config import OperationState -from google.cloud.logging_v2.types.logging_metrics import CreateLogMetricRequest -from google.cloud.logging_v2.types.logging_metrics import DeleteLogMetricRequest -from google.cloud.logging_v2.types.logging_metrics import GetLogMetricRequest -from google.cloud.logging_v2.types.logging_metrics import ListLogMetricsRequest -from google.cloud.logging_v2.types.logging_metrics import ListLogMetricsResponse -from google.cloud.logging_v2.types.logging_metrics import LogMetric -from google.cloud.logging_v2.types.logging_metrics import UpdateLogMetricRequest +from google.cloud.logging_v2.services.config_service_v2.async_client import ( + BaseConfigServiceV2AsyncClient, +) +from google.cloud.logging_v2.services.config_service_v2.client import ( + BaseConfigServiceV2Client, +) +from google.cloud.logging_v2.services.logging_service_v2.async_client import ( + LoggingServiceV2AsyncClient, +) +from google.cloud.logging_v2.services.logging_service_v2.client import ( + LoggingServiceV2Client, +) +from google.cloud.logging_v2.services.metrics_service_v2.async_client import ( + BaseMetricsServiceV2AsyncClient, +) +from google.cloud.logging_v2.services.metrics_service_v2.client import ( + BaseMetricsServiceV2Client, +) +from google.cloud.logging_v2.types.log_entry import ( + LogEntry, + LogEntryOperation, + LogEntrySourceLocation, + LogSplit, +) +from google.cloud.logging_v2.types.logging import ( + DeleteLogRequest, + ListLogEntriesRequest, + ListLogEntriesResponse, + ListLogsRequest, + ListLogsResponse, + 
ListMonitoredResourceDescriptorsRequest, + ListMonitoredResourceDescriptorsResponse, + TailLogEntriesRequest, + TailLogEntriesResponse, + WriteLogEntriesPartialErrors, + WriteLogEntriesRequest, + WriteLogEntriesResponse, +) +from google.cloud.logging_v2.types.logging_config import ( + BigQueryDataset, + BigQueryOptions, + BucketMetadata, + CmekSettings, + CopyLogEntriesMetadata, + CopyLogEntriesRequest, + CopyLogEntriesResponse, + CreateBucketRequest, + CreateExclusionRequest, + CreateLinkRequest, + CreateSinkRequest, + CreateViewRequest, + DeleteBucketRequest, + DeleteExclusionRequest, + DeleteLinkRequest, + DeleteSinkRequest, + DeleteViewRequest, + GetBucketRequest, + GetCmekSettingsRequest, + GetExclusionRequest, + GetLinkRequest, + GetSettingsRequest, + GetSinkRequest, + GetViewRequest, + IndexConfig, + IndexType, + LifecycleState, + Link, + LinkMetadata, + ListBucketsRequest, + ListBucketsResponse, + ListExclusionsRequest, + ListExclusionsResponse, + ListLinksRequest, + ListLinksResponse, + ListSinksRequest, + ListSinksResponse, + ListViewsRequest, + ListViewsResponse, + LocationMetadata, + LogBucket, + LogExclusion, + LogSink, + LogView, + OperationState, + Settings, + UndeleteBucketRequest, + UpdateBucketRequest, + UpdateCmekSettingsRequest, + UpdateExclusionRequest, + UpdateSettingsRequest, + UpdateSinkRequest, + UpdateViewRequest, +) +from google.cloud.logging_v2.types.logging_metrics import ( + CreateLogMetricRequest, + DeleteLogMetricRequest, + GetLogMetricRequest, + ListLogMetricsRequest, + ListLogMetricsResponse, + LogMetric, + UpdateLogMetricRequest, +) -__all__ = ('BaseConfigServiceV2Client', - 'BaseConfigServiceV2AsyncClient', - 'LoggingServiceV2Client', - 'LoggingServiceV2AsyncClient', - 'BaseMetricsServiceV2Client', - 'BaseMetricsServiceV2AsyncClient', - 'LogEntry', - 'LogEntryOperation', - 'LogEntrySourceLocation', - 'LogSplit', - 'DeleteLogRequest', - 'ListLogEntriesRequest', - 'ListLogEntriesResponse', - 'ListLogsRequest', - 'ListLogsResponse', 
- 'ListMonitoredResourceDescriptorsRequest', - 'ListMonitoredResourceDescriptorsResponse', - 'TailLogEntriesRequest', - 'TailLogEntriesResponse', - 'WriteLogEntriesPartialErrors', - 'WriteLogEntriesRequest', - 'WriteLogEntriesResponse', - 'BigQueryDataset', - 'BigQueryOptions', - 'BucketMetadata', - 'CmekSettings', - 'CopyLogEntriesMetadata', - 'CopyLogEntriesRequest', - 'CopyLogEntriesResponse', - 'CreateBucketRequest', - 'CreateExclusionRequest', - 'CreateLinkRequest', - 'CreateSinkRequest', - 'CreateViewRequest', - 'DeleteBucketRequest', - 'DeleteExclusionRequest', - 'DeleteLinkRequest', - 'DeleteSinkRequest', - 'DeleteViewRequest', - 'GetBucketRequest', - 'GetCmekSettingsRequest', - 'GetExclusionRequest', - 'GetLinkRequest', - 'GetSettingsRequest', - 'GetSinkRequest', - 'GetViewRequest', - 'IndexConfig', - 'Link', - 'LinkMetadata', - 'ListBucketsRequest', - 'ListBucketsResponse', - 'ListExclusionsRequest', - 'ListExclusionsResponse', - 'ListLinksRequest', - 'ListLinksResponse', - 'ListSinksRequest', - 'ListSinksResponse', - 'ListViewsRequest', - 'ListViewsResponse', - 'LocationMetadata', - 'LogBucket', - 'LogExclusion', - 'LogSink', - 'LogView', - 'Settings', - 'UndeleteBucketRequest', - 'UpdateBucketRequest', - 'UpdateCmekSettingsRequest', - 'UpdateExclusionRequest', - 'UpdateSettingsRequest', - 'UpdateSinkRequest', - 'UpdateViewRequest', - 'IndexType', - 'LifecycleState', - 'OperationState', - 'CreateLogMetricRequest', - 'DeleteLogMetricRequest', - 'GetLogMetricRequest', - 'ListLogMetricsRequest', - 'ListLogMetricsResponse', - 'LogMetric', - 'UpdateLogMetricRequest', +__all__ = ( + "BaseConfigServiceV2Client", + "BaseConfigServiceV2AsyncClient", + "LoggingServiceV2Client", + "LoggingServiceV2AsyncClient", + "BaseMetricsServiceV2Client", + "BaseMetricsServiceV2AsyncClient", + "LogEntry", + "LogEntryOperation", + "LogEntrySourceLocation", + "LogSplit", + "DeleteLogRequest", + "ListLogEntriesRequest", + "ListLogEntriesResponse", + "ListLogsRequest", + 
"ListLogsResponse", + "ListMonitoredResourceDescriptorsRequest", + "ListMonitoredResourceDescriptorsResponse", + "TailLogEntriesRequest", + "TailLogEntriesResponse", + "WriteLogEntriesPartialErrors", + "WriteLogEntriesRequest", + "WriteLogEntriesResponse", + "BigQueryDataset", + "BigQueryOptions", + "BucketMetadata", + "CmekSettings", + "CopyLogEntriesMetadata", + "CopyLogEntriesRequest", + "CopyLogEntriesResponse", + "CreateBucketRequest", + "CreateExclusionRequest", + "CreateLinkRequest", + "CreateSinkRequest", + "CreateViewRequest", + "DeleteBucketRequest", + "DeleteExclusionRequest", + "DeleteLinkRequest", + "DeleteSinkRequest", + "DeleteViewRequest", + "GetBucketRequest", + "GetCmekSettingsRequest", + "GetExclusionRequest", + "GetLinkRequest", + "GetSettingsRequest", + "GetSinkRequest", + "GetViewRequest", + "IndexConfig", + "Link", + "LinkMetadata", + "ListBucketsRequest", + "ListBucketsResponse", + "ListExclusionsRequest", + "ListExclusionsResponse", + "ListLinksRequest", + "ListLinksResponse", + "ListSinksRequest", + "ListSinksResponse", + "ListViewsRequest", + "ListViewsResponse", + "LocationMetadata", + "LogBucket", + "LogExclusion", + "LogSink", + "LogView", + "Settings", + "UndeleteBucketRequest", + "UpdateBucketRequest", + "UpdateCmekSettingsRequest", + "UpdateExclusionRequest", + "UpdateSettingsRequest", + "UpdateSinkRequest", + "UpdateViewRequest", + "IndexType", + "LifecycleState", + "OperationState", + "CreateLogMetricRequest", + "DeleteLogMetricRequest", + "GetLogMetricRequest", + "ListLogMetricsRequest", + "ListLogMetricsResponse", + "LogMetric", + "UpdateLogMetricRequest", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/__init__.py index 38f3dc49f500..6583139a3e21 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/__init__.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/__init__.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from google.cloud.logging_v2 import gapic_version as package_version +import sys import google.api_core as api_core -import sys +from google.cloud.logging_v2 import gapic_version as package_version __version__ = package_version.__version__ @@ -28,117 +28,136 @@ import importlib_metadata as metadata -from .services.config_service_v2 import BaseConfigServiceV2Client -from .services.config_service_v2 import BaseConfigServiceV2AsyncClient -from .services.logging_service_v2 import LoggingServiceV2Client -from .services.logging_service_v2 import LoggingServiceV2AsyncClient -from .services.metrics_service_v2 import BaseMetricsServiceV2Client -from .services.metrics_service_v2 import BaseMetricsServiceV2AsyncClient - -from .types.log_entry import LogEntry -from .types.log_entry import LogEntryOperation -from .types.log_entry import LogEntrySourceLocation -from .types.log_entry import LogSplit -from .types.logging import DeleteLogRequest -from .types.logging import ListLogEntriesRequest -from .types.logging import ListLogEntriesResponse -from .types.logging import ListLogsRequest -from .types.logging import ListLogsResponse -from .types.logging import ListMonitoredResourceDescriptorsRequest -from .types.logging import ListMonitoredResourceDescriptorsResponse -from .types.logging import TailLogEntriesRequest -from .types.logging import TailLogEntriesResponse -from .types.logging import WriteLogEntriesPartialErrors -from .types.logging import WriteLogEntriesRequest -from .types.logging import WriteLogEntriesResponse -from .types.logging_config import BigQueryDataset -from .types.logging_config import BigQueryOptions -from .types.logging_config import BucketMetadata -from .types.logging_config import CmekSettings -from .types.logging_config import 
CopyLogEntriesMetadata -from .types.logging_config import CopyLogEntriesRequest -from .types.logging_config import CopyLogEntriesResponse -from .types.logging_config import CreateBucketRequest -from .types.logging_config import CreateExclusionRequest -from .types.logging_config import CreateLinkRequest -from .types.logging_config import CreateSinkRequest -from .types.logging_config import CreateViewRequest -from .types.logging_config import DeleteBucketRequest -from .types.logging_config import DeleteExclusionRequest -from .types.logging_config import DeleteLinkRequest -from .types.logging_config import DeleteSinkRequest -from .types.logging_config import DeleteViewRequest -from .types.logging_config import GetBucketRequest -from .types.logging_config import GetCmekSettingsRequest -from .types.logging_config import GetExclusionRequest -from .types.logging_config import GetLinkRequest -from .types.logging_config import GetSettingsRequest -from .types.logging_config import GetSinkRequest -from .types.logging_config import GetViewRequest -from .types.logging_config import IndexConfig -from .types.logging_config import Link -from .types.logging_config import LinkMetadata -from .types.logging_config import ListBucketsRequest -from .types.logging_config import ListBucketsResponse -from .types.logging_config import ListExclusionsRequest -from .types.logging_config import ListExclusionsResponse -from .types.logging_config import ListLinksRequest -from .types.logging_config import ListLinksResponse -from .types.logging_config import ListSinksRequest -from .types.logging_config import ListSinksResponse -from .types.logging_config import ListViewsRequest -from .types.logging_config import ListViewsResponse -from .types.logging_config import LocationMetadata -from .types.logging_config import LogBucket -from .types.logging_config import LogExclusion -from .types.logging_config import LogSink -from .types.logging_config import LogView -from .types.logging_config import Settings 
-from .types.logging_config import UndeleteBucketRequest -from .types.logging_config import UpdateBucketRequest -from .types.logging_config import UpdateCmekSettingsRequest -from .types.logging_config import UpdateExclusionRequest -from .types.logging_config import UpdateSettingsRequest -from .types.logging_config import UpdateSinkRequest -from .types.logging_config import UpdateViewRequest -from .types.logging_config import IndexType -from .types.logging_config import LifecycleState -from .types.logging_config import OperationState -from .types.logging_metrics import CreateLogMetricRequest -from .types.logging_metrics import DeleteLogMetricRequest -from .types.logging_metrics import GetLogMetricRequest -from .types.logging_metrics import ListLogMetricsRequest -from .types.logging_metrics import ListLogMetricsResponse -from .types.logging_metrics import LogMetric -from .types.logging_metrics import UpdateLogMetricRequest +from .services.config_service_v2 import ( + BaseConfigServiceV2AsyncClient, + BaseConfigServiceV2Client, +) +from .services.logging_service_v2 import ( + LoggingServiceV2AsyncClient, + LoggingServiceV2Client, +) +from .services.metrics_service_v2 import ( + BaseMetricsServiceV2AsyncClient, + BaseMetricsServiceV2Client, +) +from .types.log_entry import ( + LogEntry, + LogEntryOperation, + LogEntrySourceLocation, + LogSplit, +) +from .types.logging import ( + DeleteLogRequest, + ListLogEntriesRequest, + ListLogEntriesResponse, + ListLogsRequest, + ListLogsResponse, + ListMonitoredResourceDescriptorsRequest, + ListMonitoredResourceDescriptorsResponse, + TailLogEntriesRequest, + TailLogEntriesResponse, + WriteLogEntriesPartialErrors, + WriteLogEntriesRequest, + WriteLogEntriesResponse, +) +from .types.logging_config import ( + BigQueryDataset, + BigQueryOptions, + BucketMetadata, + CmekSettings, + CopyLogEntriesMetadata, + CopyLogEntriesRequest, + CopyLogEntriesResponse, + CreateBucketRequest, + CreateExclusionRequest, + CreateLinkRequest, + 
CreateSinkRequest, + CreateViewRequest, + DeleteBucketRequest, + DeleteExclusionRequest, + DeleteLinkRequest, + DeleteSinkRequest, + DeleteViewRequest, + GetBucketRequest, + GetCmekSettingsRequest, + GetExclusionRequest, + GetLinkRequest, + GetSettingsRequest, + GetSinkRequest, + GetViewRequest, + IndexConfig, + IndexType, + LifecycleState, + Link, + LinkMetadata, + ListBucketsRequest, + ListBucketsResponse, + ListExclusionsRequest, + ListExclusionsResponse, + ListLinksRequest, + ListLinksResponse, + ListSinksRequest, + ListSinksResponse, + ListViewsRequest, + ListViewsResponse, + LocationMetadata, + LogBucket, + LogExclusion, + LogSink, + LogView, + OperationState, + Settings, + UndeleteBucketRequest, + UpdateBucketRequest, + UpdateCmekSettingsRequest, + UpdateExclusionRequest, + UpdateSettingsRequest, + UpdateSinkRequest, + UpdateViewRequest, +) +from .types.logging_metrics import ( + CreateLogMetricRequest, + DeleteLogMetricRequest, + GetLogMetricRequest, + ListLogMetricsRequest, + ListLogMetricsResponse, + LogMetric, + UpdateLogMetricRequest, +) -if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER - api_core.check_python_version("google.cloud.logging_v2") # type: ignore - api_core.check_dependency_versions("google.cloud.logging_v2") # type: ignore -else: # pragma: NO COVER +if hasattr(api_core, "check_python_version") and hasattr( + api_core, "check_dependency_versions" +): # pragma: NO COVER + api_core.check_python_version("google.cloud.logging_v2") # type: ignore + api_core.check_dependency_versions("google.cloud.logging_v2") # type: ignore +else: # pragma: NO COVER # An older version of api_core is installed which does not define the # functions above. We do equivalent checks manually. 
try: - import warnings import sys + import warnings _py_version_str = sys.version.split()[0] _package_label = "google.cloud.logging_v2" if sys.version_info < (3, 9): - warnings.warn("You are using a non-supported Python version " + - f"({_py_version_str}). Google will not post any further " + - f"updates to {_package_label} supporting this Python version. " + - "Please upgrade to the latest Python version, or at " + - f"least to Python 3.9, and then update {_package_label}.", - FutureWarning) + warnings.warn( + "You are using a non-supported Python version " + + f"({_py_version_str}). Google will not post any further " + + f"updates to {_package_label} supporting this Python version. " + + "Please upgrade to the latest Python version, or at " + + f"least to Python 3.9, and then update {_package_label}.", + FutureWarning, + ) if sys.version_info[:2] == (3, 9): - warnings.warn(f"You are using a Python version ({_py_version_str}) " + - f"which Google will stop supporting in {_package_label} in " + - "January 2026. Please " + - "upgrade to the latest Python version, or at " + - "least to Python 3.10, before then, and " + - f"then update {_package_label}.", - FutureWarning) + warnings.warn( + f"You are using a Python version ({_py_version_str}) " + + f"which Google will stop supporting in {_package_label} in " + + "January 2026. Please " + + "upgrade to the latest Python version, or at " + + "least to Python 3.10, before then, and " + + f"then update {_package_label}.", + FutureWarning, + ) def parse_version_to_tuple(version_string: str): """Safely converts a semantic version string to a comparable tuple of integers. 
@@ -176,107 +195,111 @@ def _get_version(dependency_name): _recommendation = " (we recommend 6.x)" (_version_used, _version_used_string) = _get_version(_dependency_package) if _version_used and _version_used < _next_supported_version_tuple: - warnings.warn(f"Package {_package_label} depends on " + - f"{_dependency_package}, currently installed at version " + - f"{_version_used_string}. Future updates to " + - f"{_package_label} will require {_dependency_package} at " + - f"version {_next_supported_version} or higher{_recommendation}." + - " Please ensure " + - "that either (a) your Python environment doesn't pin the " + - f"version of {_dependency_package}, so that updates to " + - f"{_package_label} can require the higher version, or " + - "(b) you manually update your Python environment to use at " + - f"least version {_next_supported_version} of " + - f"{_dependency_package}.", - FutureWarning) + warnings.warn( + f"Package {_package_label} depends on " + + f"{_dependency_package}, currently installed at version " + + f"{_version_used_string}. Future updates to " + + f"{_package_label} will require {_dependency_package} at " + + f"version {_next_supported_version} or higher{_recommendation}." + + " Please ensure " + + "that either (a) your Python environment doesn't pin the " + + f"version of {_dependency_package}, so that updates to " + + f"{_package_label} can require the higher version, or " + + "(b) you manually update your Python environment to use at " + + f"least version {_next_supported_version} of " + + f"{_dependency_package}.", + FutureWarning, + ) except Exception: - warnings.warn("Could not determine the version of Python " + - "currently being used. To continue receiving " + - "updates for {_package_label}, ensure you are " + - "using a supported version of Python; see " + - "https://devguide.python.org/versions/") + warnings.warn( + "Could not determine the version of Python " + + "currently being used. 
To continue receiving " + + "updates for {_package_label}, ensure you are " + + "using a supported version of Python; see " + + "https://devguide.python.org/versions/" + ) __all__ = ( - 'BaseConfigServiceV2AsyncClient', - 'BaseMetricsServiceV2AsyncClient', - 'LoggingServiceV2AsyncClient', -'BaseConfigServiceV2Client', -'BaseMetricsServiceV2Client', -'BigQueryDataset', -'BigQueryOptions', -'BucketMetadata', -'CmekSettings', -'CopyLogEntriesMetadata', -'CopyLogEntriesRequest', -'CopyLogEntriesResponse', -'CreateBucketRequest', -'CreateExclusionRequest', -'CreateLinkRequest', -'CreateLogMetricRequest', -'CreateSinkRequest', -'CreateViewRequest', -'DeleteBucketRequest', -'DeleteExclusionRequest', -'DeleteLinkRequest', -'DeleteLogMetricRequest', -'DeleteLogRequest', -'DeleteSinkRequest', -'DeleteViewRequest', -'GetBucketRequest', -'GetCmekSettingsRequest', -'GetExclusionRequest', -'GetLinkRequest', -'GetLogMetricRequest', -'GetSettingsRequest', -'GetSinkRequest', -'GetViewRequest', -'IndexConfig', -'IndexType', -'LifecycleState', -'Link', -'LinkMetadata', -'ListBucketsRequest', -'ListBucketsResponse', -'ListExclusionsRequest', -'ListExclusionsResponse', -'ListLinksRequest', -'ListLinksResponse', -'ListLogEntriesRequest', -'ListLogEntriesResponse', -'ListLogMetricsRequest', -'ListLogMetricsResponse', -'ListLogsRequest', -'ListLogsResponse', -'ListMonitoredResourceDescriptorsRequest', -'ListMonitoredResourceDescriptorsResponse', -'ListSinksRequest', -'ListSinksResponse', -'ListViewsRequest', -'ListViewsResponse', -'LocationMetadata', -'LogBucket', -'LogEntry', -'LogEntryOperation', -'LogEntrySourceLocation', -'LogExclusion', -'LogMetric', -'LogSink', -'LogSplit', -'LogView', -'LoggingServiceV2Client', -'OperationState', -'Settings', -'TailLogEntriesRequest', -'TailLogEntriesResponse', -'UndeleteBucketRequest', -'UpdateBucketRequest', -'UpdateCmekSettingsRequest', -'UpdateExclusionRequest', -'UpdateLogMetricRequest', -'UpdateSettingsRequest', -'UpdateSinkRequest', 
-'UpdateViewRequest', -'WriteLogEntriesPartialErrors', -'WriteLogEntriesRequest', -'WriteLogEntriesResponse', + "BaseConfigServiceV2AsyncClient", + "BaseMetricsServiceV2AsyncClient", + "LoggingServiceV2AsyncClient", + "BaseConfigServiceV2Client", + "BaseMetricsServiceV2Client", + "BigQueryDataset", + "BigQueryOptions", + "BucketMetadata", + "CmekSettings", + "CopyLogEntriesMetadata", + "CopyLogEntriesRequest", + "CopyLogEntriesResponse", + "CreateBucketRequest", + "CreateExclusionRequest", + "CreateLinkRequest", + "CreateLogMetricRequest", + "CreateSinkRequest", + "CreateViewRequest", + "DeleteBucketRequest", + "DeleteExclusionRequest", + "DeleteLinkRequest", + "DeleteLogMetricRequest", + "DeleteLogRequest", + "DeleteSinkRequest", + "DeleteViewRequest", + "GetBucketRequest", + "GetCmekSettingsRequest", + "GetExclusionRequest", + "GetLinkRequest", + "GetLogMetricRequest", + "GetSettingsRequest", + "GetSinkRequest", + "GetViewRequest", + "IndexConfig", + "IndexType", + "LifecycleState", + "Link", + "LinkMetadata", + "ListBucketsRequest", + "ListBucketsResponse", + "ListExclusionsRequest", + "ListExclusionsResponse", + "ListLinksRequest", + "ListLinksResponse", + "ListLogEntriesRequest", + "ListLogEntriesResponse", + "ListLogMetricsRequest", + "ListLogMetricsResponse", + "ListLogsRequest", + "ListLogsResponse", + "ListMonitoredResourceDescriptorsRequest", + "ListMonitoredResourceDescriptorsResponse", + "ListSinksRequest", + "ListSinksResponse", + "ListViewsRequest", + "ListViewsResponse", + "LocationMetadata", + "LogBucket", + "LogEntry", + "LogEntryOperation", + "LogEntrySourceLocation", + "LogExclusion", + "LogMetric", + "LogSink", + "LogSplit", + "LogView", + "LoggingServiceV2Client", + "OperationState", + "Settings", + "TailLogEntriesRequest", + "TailLogEntriesResponse", + "UndeleteBucketRequest", + "UpdateBucketRequest", + "UpdateCmekSettingsRequest", + "UpdateExclusionRequest", + "UpdateLogMetricRequest", + "UpdateSettingsRequest", + "UpdateSinkRequest", + 
"UpdateViewRequest", + "WriteLogEntriesPartialErrors", + "WriteLogEntriesRequest", + "WriteLogEntriesResponse", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/__init__.py index 189db0d9c351..ec25a15a30c9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/__init__.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from .client import BaseConfigServiceV2Client from .async_client import BaseConfigServiceV2AsyncClient +from .client import BaseConfigServiceV2Client __all__ = ( - 'BaseConfigServiceV2Client', - 'BaseConfigServiceV2AsyncClient', + "BaseConfigServiceV2Client", + "BaseConfigServiceV2AsyncClient", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py index 654e5c6584e7..9c023c0dc526 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -14,46 +14,58 @@ # limitations under the License. 
# import logging as std_logging -from collections import OrderedDict import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.logging_v2 import gapic_version as package_version +from collections import OrderedDict +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) -from google.api_core.client_options import ClientOptions +import google.protobuf from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.logging_v2 import gapic_version as package_version +from google.oauth2 import service_account # type: ignore try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore -from google.cloud.logging_v2.services.config_service_v2 import pagers -from google.cloud.logging_v2.types import logging_config -from google.longrunning import operations_pb2 # type: ignore import google.api_core.operation as operation # type: ignore import google.api_core.operation_async as operation_async # type: ignore import google.protobuf.empty_pb2 as empty_pb2 # type: ignore import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore -from .transports.base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport +from 
google.cloud.logging_v2.services.config_service_v2 import pagers +from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore + from .client import BaseConfigServiceV2Client +from .transports.base import DEFAULT_CLIENT_INFO, ConfigServiceV2Transport +from .transports.grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) + class BaseConfigServiceV2AsyncClient: """Service for configuring sinks used to route log entries.""" @@ -67,29 +79,49 @@ class BaseConfigServiceV2AsyncClient: _DEFAULT_UNIVERSE = BaseConfigServiceV2Client._DEFAULT_UNIVERSE cmek_settings_path = staticmethod(BaseConfigServiceV2Client.cmek_settings_path) - parse_cmek_settings_path = staticmethod(BaseConfigServiceV2Client.parse_cmek_settings_path) + parse_cmek_settings_path = staticmethod( + BaseConfigServiceV2Client.parse_cmek_settings_path + ) link_path = staticmethod(BaseConfigServiceV2Client.link_path) parse_link_path = staticmethod(BaseConfigServiceV2Client.parse_link_path) log_bucket_path = staticmethod(BaseConfigServiceV2Client.log_bucket_path) - parse_log_bucket_path = staticmethod(BaseConfigServiceV2Client.parse_log_bucket_path) + parse_log_bucket_path = staticmethod( + BaseConfigServiceV2Client.parse_log_bucket_path + ) log_exclusion_path = staticmethod(BaseConfigServiceV2Client.log_exclusion_path) - parse_log_exclusion_path = staticmethod(BaseConfigServiceV2Client.parse_log_exclusion_path) + parse_log_exclusion_path = staticmethod( + BaseConfigServiceV2Client.parse_log_exclusion_path + ) log_sink_path = staticmethod(BaseConfigServiceV2Client.log_sink_path) parse_log_sink_path = staticmethod(BaseConfigServiceV2Client.parse_log_sink_path) log_view_path = staticmethod(BaseConfigServiceV2Client.log_view_path) 
parse_log_view_path = staticmethod(BaseConfigServiceV2Client.parse_log_view_path) settings_path = staticmethod(BaseConfigServiceV2Client.settings_path) parse_settings_path = staticmethod(BaseConfigServiceV2Client.parse_settings_path) - common_billing_account_path = staticmethod(BaseConfigServiceV2Client.common_billing_account_path) - parse_common_billing_account_path = staticmethod(BaseConfigServiceV2Client.parse_common_billing_account_path) + common_billing_account_path = staticmethod( + BaseConfigServiceV2Client.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + BaseConfigServiceV2Client.parse_common_billing_account_path + ) common_folder_path = staticmethod(BaseConfigServiceV2Client.common_folder_path) - parse_common_folder_path = staticmethod(BaseConfigServiceV2Client.parse_common_folder_path) - common_organization_path = staticmethod(BaseConfigServiceV2Client.common_organization_path) - parse_common_organization_path = staticmethod(BaseConfigServiceV2Client.parse_common_organization_path) + parse_common_folder_path = staticmethod( + BaseConfigServiceV2Client.parse_common_folder_path + ) + common_organization_path = staticmethod( + BaseConfigServiceV2Client.common_organization_path + ) + parse_common_organization_path = staticmethod( + BaseConfigServiceV2Client.parse_common_organization_path + ) common_project_path = staticmethod(BaseConfigServiceV2Client.common_project_path) - parse_common_project_path = staticmethod(BaseConfigServiceV2Client.parse_common_project_path) + parse_common_project_path = staticmethod( + BaseConfigServiceV2Client.parse_common_project_path + ) common_location_path = staticmethod(BaseConfigServiceV2Client.common_location_path) - parse_common_location_path = staticmethod(BaseConfigServiceV2Client.parse_common_location_path) + parse_common_location_path = staticmethod( + BaseConfigServiceV2Client.parse_common_location_path + ) @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): 
@@ -131,7 +163,9 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): """Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -162,7 +196,9 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOption Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - return BaseConfigServiceV2Client.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + return BaseConfigServiceV2Client.get_mtls_endpoint_and_cert_source( + client_options + ) # type: ignore @property def transport(self) -> ConfigServiceV2Transport: @@ -194,12 +230,18 @@ def universe_domain(self) -> str: get_transport_class = BaseConfigServiceV2Client.get_transport_class - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ConfigServiceV2Transport, Callable[..., ConfigServiceV2Transport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, ConfigServiceV2Transport, Callable[..., ConfigServiceV2Transport] + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the base config service v2 async client. 
Args: @@ -254,31 +296,39 @@ def __init__(self, *, transport=transport, client_options=client_options, client_info=client_info, - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER _LOGGER.debug( "Created client `google.logging_v2.BaseConfigServiceV2AsyncClient`.", - extra = { + extra={ "serviceName": "google.logging.v2.ConfigServiceV2", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { "serviceName": "google.logging.v2.ConfigServiceV2", "credentialsType": None, - } + }, ) - async def list_buckets(self, - request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListBucketsAsyncPager: + async def list_buckets( + self, + request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListBucketsAsyncPager: r"""Lists log buckets. .. 
code-block:: python @@ -350,10 +400,14 @@ async def sample_list_buckets(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -367,14 +421,14 @@ async def sample_list_buckets(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_buckets] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_buckets + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -402,13 +456,14 @@ async def sample_list_buckets(): # Done; return the response. 
return response - async def get_bucket(self, - request: Optional[Union[logging_config.GetBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogBucket: + async def get_bucket( + self, + request: Optional[Union[logging_config.GetBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogBucket: r"""Gets a log bucket. .. code-block:: python @@ -462,14 +517,14 @@ async def sample_get_bucket(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_bucket] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_bucket + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -486,13 +541,14 @@ async def sample_get_bucket(): # Done; return the response. 
return response - async def create_bucket_async(self, - request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def create_bucket_async( + self, + request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Creates a log bucket asynchronously that can be used to store log entries. After a bucket has been created, the bucket's location @@ -557,14 +613,14 @@ async def sample_create_bucket_async(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_bucket_async] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_bucket_async + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -589,13 +645,14 @@ async def sample_create_bucket_async(): # Done; return the response. 
return response - async def update_bucket_async(self, - request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def update_bucket_async( + self, + request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Updates a log bucket asynchronously. If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, @@ -662,14 +719,14 @@ async def sample_update_bucket_async(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_bucket_async] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_bucket_async + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -694,13 +751,14 @@ async def sample_update_bucket_async(): # Done; return the response. 
return response - async def create_bucket(self, - request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogBucket: + async def create_bucket( + self, + request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogBucket: r"""Creates a log bucket that can be used to store log entries. After a bucket has been created, the bucket's location cannot be changed. @@ -757,14 +815,14 @@ async def sample_create_bucket(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_bucket] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_bucket + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -781,13 +839,14 @@ async def sample_create_bucket(): # Done; return the response. 
return response - async def update_bucket(self, - request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogBucket: + async def update_bucket( + self, + request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogBucket: r"""Updates a log bucket. If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, @@ -847,14 +906,14 @@ async def sample_update_bucket(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_bucket] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_bucket + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -871,13 +930,14 @@ async def sample_update_bucket(): # Done; return the response. 
return response - async def delete_bucket(self, - request: Optional[Union[logging_config.DeleteBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def delete_bucket( + self, + request: Optional[Union[logging_config.DeleteBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a log bucket. Changes the bucket's ``lifecycle_state`` to the @@ -927,14 +987,14 @@ async def sample_delete_bucket(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_bucket] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_bucket + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. 
@@ -948,13 +1008,14 @@ async def sample_delete_bucket(): metadata=metadata, ) - async def undelete_bucket(self, - request: Optional[Union[logging_config.UndeleteBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def undelete_bucket( + self, + request: Optional[Union[logging_config.UndeleteBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Undeletes a log bucket. A bucket that has been deleted can be undeleted within the grace period of 7 days. @@ -1001,14 +1062,14 @@ async def sample_undelete_bucket(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.undelete_bucket] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.undelete_bucket + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. 
@@ -1022,14 +1083,15 @@ async def sample_undelete_bucket(): metadata=metadata, ) - async def _list_views(self, - request: Optional[Union[logging_config.ListViewsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListViewsAsyncPager: + async def _list_views( + self, + request: Optional[Union[logging_config.ListViewsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListViewsAsyncPager: r"""Lists views on a log bucket. .. code-block:: python @@ -1093,10 +1155,14 @@ async def sample_list_views(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1110,14 +1176,14 @@ async def sample_list_views(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.list_views] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_views + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1145,13 +1211,14 @@ async def sample_list_views(): # Done; return the response. return response - async def _get_view(self, - request: Optional[Union[logging_config.GetViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogView: + async def _get_view( + self, + request: Optional[Union[logging_config.GetViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogView: r"""Gets a view on a log bucket.. .. code-block:: python @@ -1210,9 +1277,7 @@ async def sample_get_view(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1229,13 +1294,14 @@ async def sample_get_view(): # Done; return the response. 
return response - async def _create_view(self, - request: Optional[Union[logging_config.CreateViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogView: + async def _create_view( + self, + request: Optional[Union[logging_config.CreateViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogView: r"""Creates a view over log entries in a log bucket. A bucket may contain a maximum of 30 views. @@ -1291,14 +1357,14 @@ async def sample_create_view(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_view] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_view + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1315,13 +1381,14 @@ async def sample_create_view(): # Done; return the response. 
return response - async def _update_view(self, - request: Optional[Union[logging_config.UpdateViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogView: + async def _update_view( + self, + request: Optional[Union[logging_config.UpdateViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogView: r"""Updates a view on a log bucket. This method replaces the following fields in the existing view with values from the new view: ``filter``. If an ``UNAVAILABLE`` error is returned, this @@ -1379,14 +1446,14 @@ async def sample_update_view(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_view] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_view + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1403,13 +1470,14 @@ async def sample_update_view(): # Done; return the response. 
return response - async def _delete_view(self, - request: Optional[Union[logging_config.DeleteViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def _delete_view( + self, + request: Optional[Union[logging_config.DeleteViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is returned, this indicates that system is not in a state where it can delete the view. If this occurs, please try again in a few @@ -1457,14 +1525,14 @@ async def sample_delete_view(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_view] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_view + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. 
@@ -1478,14 +1546,15 @@ async def sample_delete_view(): metadata=metadata, ) - async def _list_sinks(self, - request: Optional[Union[logging_config.ListSinksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListSinksAsyncPager: + async def _list_sinks( + self, + request: Optional[Union[logging_config.ListSinksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSinksAsyncPager: r"""Lists sinks. .. code-block:: python @@ -1552,10 +1621,14 @@ async def sample_list_sinks(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1569,14 +1642,14 @@ async def sample_list_sinks(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.list_sinks] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_sinks + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1604,14 +1677,15 @@ async def sample_list_sinks(): # Done; return the response. return response - async def _get_sink(self, - request: Optional[Union[logging_config.GetSinkRequest, dict]] = None, - *, - sink_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogSink: + async def _get_sink( + self, + request: Optional[Union[logging_config.GetSinkRequest, dict]] = None, + *, + sink_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogSink: r"""Gets a sink. .. code-block:: python @@ -1685,10 +1759,14 @@ async def sample_get_sink(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [sink_name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1707,9 +1785,9 @@ async def sample_get_sink(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("sink_name", request.sink_name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("sink_name", request.sink_name),) + ), ) # Validate the universe domain. @@ -1726,15 +1804,16 @@ async def sample_get_sink(): # Done; return the response. return response - async def _create_sink(self, - request: Optional[Union[logging_config.CreateSinkRequest, dict]] = None, - *, - parent: Optional[str] = None, - sink: Optional[logging_config.LogSink] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogSink: + async def _create_sink( + self, + request: Optional[Union[logging_config.CreateSinkRequest, dict]] = None, + *, + parent: Optional[str] = None, + sink: Optional[logging_config.LogSink] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogSink: r"""Creates a sink that exports specified log entries to a destination. The export of newly-ingested log entries begins immediately, unless the sink's ``writer_identity`` is not @@ -1824,10 +1903,14 @@ async def sample_create_sink(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [parent, sink] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1843,14 +1926,14 @@ async def sample_create_sink(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_sink] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_sink + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1867,16 +1950,17 @@ async def sample_create_sink(): # Done; return the response. 
return response - async def _update_sink(self, - request: Optional[Union[logging_config.UpdateSinkRequest, dict]] = None, - *, - sink_name: Optional[str] = None, - sink: Optional[logging_config.LogSink] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogSink: + async def _update_sink( + self, + request: Optional[Union[logging_config.UpdateSinkRequest, dict]] = None, + *, + sink_name: Optional[str] = None, + sink: Optional[logging_config.LogSink] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogSink: r"""Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: ``destination``, and ``filter``. @@ -1990,10 +2074,14 @@ async def sample_update_sink(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [sink_name, sink, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -2011,14 +2099,16 @@ async def sample_update_sink(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_sink] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_sink + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("sink_name", request.sink_name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("sink_name", request.sink_name),) + ), ) # Validate the universe domain. @@ -2035,14 +2125,15 @@ async def sample_update_sink(): # Done; return the response. return response - async def _delete_sink(self, - request: Optional[Union[logging_config.DeleteSinkRequest, dict]] = None, - *, - sink_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def _delete_sink( + self, + request: Optional[Union[logging_config.DeleteSinkRequest, dict]] = None, + *, + sink_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a sink. If the sink has a unique ``writer_identity``, then that service account is also deleted. @@ -2102,10 +2193,14 @@ async def sample_delete_sink(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [sink_name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2119,14 +2214,16 @@ async def sample_delete_sink(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_sink] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_sink + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("sink_name", request.sink_name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("sink_name", request.sink_name),) + ), ) # Validate the universe domain. 
@@ -2140,16 +2237,17 @@ async def sample_delete_sink(): metadata=metadata, ) - async def _create_link(self, - request: Optional[Union[logging_config.CreateLinkRequest, dict]] = None, - *, - parent: Optional[str] = None, - link: Optional[logging_config.Link] = None, - link_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def _create_link( + self, + request: Optional[Union[logging_config.CreateLinkRequest, dict]] = None, + *, + parent: Optional[str] = None, + link: Optional[logging_config.Link] = None, + link_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Asynchronously creates a linked dataset in BigQuery which makes it possible to use BigQuery to read the logs stored in the log bucket. A log bucket may currently @@ -2237,10 +2335,14 @@ async def sample_create_link(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent, link, link_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -2258,14 +2360,14 @@ async def sample_create_link(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_link] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_link + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2290,14 +2392,15 @@ async def sample_create_link(): # Done; return the response. return response - async def _delete_link(self, - request: Optional[Union[logging_config.DeleteLinkRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def _delete_link( + self, + request: Optional[Union[logging_config.DeleteLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Deletes a link. This will also delete the corresponding BigQuery linked dataset. @@ -2373,10 +2476,14 @@ async def sample_delete_link(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2390,14 +2497,14 @@ async def sample_delete_link(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_link] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_link + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2422,14 +2529,15 @@ async def sample_delete_link(): # Done; return the response. 
return response - async def _list_links(self, - request: Optional[Union[logging_config.ListLinksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLinksAsyncPager: + async def _list_links( + self, + request: Optional[Union[logging_config.ListLinksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLinksAsyncPager: r"""Lists links. .. code-block:: python @@ -2495,10 +2603,14 @@ async def sample_list_links(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2512,14 +2624,14 @@ async def sample_list_links(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_links] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_links + ] # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2547,14 +2659,15 @@ async def sample_list_links(): # Done; return the response. return response - async def _get_link(self, - request: Optional[Union[logging_config.GetLinkRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.Link: + async def _get_link( + self, + request: Optional[Union[logging_config.GetLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Link: r"""Gets a link. .. code-block:: python @@ -2615,10 +2728,14 @@ async def sample_get_link(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2637,9 +2754,7 @@ async def sample_get_link(): # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2656,14 +2771,15 @@ async def sample_get_link(): # Done; return the response. return response - async def _list_exclusions(self, - request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListExclusionsAsyncPager: + async def _list_exclusions( + self, + request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListExclusionsAsyncPager: r"""Lists all the exclusions on the \_Default sink in a parent resource. @@ -2731,10 +2847,14 @@ async def sample_list_exclusions(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -2748,14 +2868,14 @@ async def sample_list_exclusions(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_exclusions] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_exclusions + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2783,14 +2903,15 @@ async def sample_list_exclusions(): # Done; return the response. return response - async def _get_exclusion(self, - request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogExclusion: + async def _get_exclusion( + self, + request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogExclusion: r"""Gets the description of an exclusion in the \_Default sink. .. code-block:: python @@ -2862,10 +2983,14 @@ async def sample_get_exclusion(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2879,14 +3004,14 @@ async def sample_get_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_exclusion] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_exclusion + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2903,15 +3028,16 @@ async def sample_get_exclusion(): # Done; return the response. 
return response - async def _create_exclusion(self, - request: Optional[Union[logging_config.CreateExclusionRequest, dict]] = None, - *, - parent: Optional[str] = None, - exclusion: Optional[logging_config.LogExclusion] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogExclusion: + async def _create_exclusion( + self, + request: Optional[Union[logging_config.CreateExclusionRequest, dict]] = None, + *, + parent: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogExclusion: r"""Creates a new exclusion in the \_Default sink in a specified parent resource. Only log entries belonging to that resource can be excluded. You can have up to 10 exclusions in a resource. @@ -3000,10 +3126,14 @@ async def sample_create_exclusion(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent, exclusion] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -3019,14 +3149,14 @@ async def sample_create_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_exclusion] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_exclusion + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -3043,16 +3173,17 @@ async def sample_create_exclusion(): # Done; return the response. return response - async def _update_exclusion(self, - request: Optional[Union[logging_config.UpdateExclusionRequest, dict]] = None, - *, - name: Optional[str] = None, - exclusion: Optional[logging_config.LogExclusion] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogExclusion: + async def _update_exclusion( + self, + request: Optional[Union[logging_config.UpdateExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogExclusion: r"""Changes one or more properties of an existing exclusion in the \_Default sink. @@ -3152,10 +3283,14 @@ async def sample_update_exclusion(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [name, exclusion, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3173,14 +3308,14 @@ async def sample_update_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_exclusion] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_exclusion + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3197,14 +3332,15 @@ async def sample_update_exclusion(): # Done; return the response. 
return response - async def _delete_exclusion(self, - request: Optional[Union[logging_config.DeleteExclusionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def _delete_exclusion( + self, + request: Optional[Union[logging_config.DeleteExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes an exclusion in the \_Default sink. .. code-block:: python @@ -3263,10 +3399,14 @@ async def sample_delete_exclusion(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3280,14 +3420,14 @@ async def sample_delete_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.delete_exclusion] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_exclusion + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3301,13 +3441,14 @@ async def sample_delete_exclusion(): metadata=metadata, ) - async def _get_cmek_settings(self, - request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.CmekSettings: + async def _get_cmek_settings( + self, + request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.CmekSettings: r"""Gets the Logging CMEK settings for the given resource. Note: CMEK for the Log Router can be configured for Google Cloud @@ -3385,14 +3526,14 @@ async def sample_get_cmek_settings(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_cmek_settings] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_cmek_settings + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. 
@@ -3409,13 +3550,14 @@ async def sample_get_cmek_settings(): # Done; return the response. return response - async def _update_cmek_settings(self, - request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.CmekSettings: + async def _update_cmek_settings( + self, + request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.CmekSettings: r"""Updates the Log Router CMEK settings for the given resource. Note: CMEK for the Log Router can currently only be configured @@ -3498,14 +3640,14 @@ async def sample_update_cmek_settings(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_cmek_settings] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_cmek_settings + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3522,14 +3664,15 @@ async def sample_update_cmek_settings(): # Done; return the response. 
return response - async def _get_settings(self, - request: Optional[Union[logging_config.GetSettingsRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.Settings: + async def _get_settings( + self, + request: Optional[Union[logging_config.GetSettingsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Settings: r"""Gets the Log Router settings for the given resource. Note: Settings for the Log Router can be get for Google Cloud @@ -3619,10 +3762,14 @@ async def sample_get_settings(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3636,14 +3783,14 @@ async def sample_get_settings(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.get_settings] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_settings + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3660,15 +3807,16 @@ async def sample_get_settings(): # Done; return the response. return response - async def _update_settings(self, - request: Optional[Union[logging_config.UpdateSettingsRequest, dict]] = None, - *, - settings: Optional[logging_config.Settings] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.Settings: + async def _update_settings( + self, + request: Optional[Union[logging_config.UpdateSettingsRequest, dict]] = None, + *, + settings: Optional[logging_config.Settings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Settings: r"""Updates the Log Router settings for the given resource. Note: Settings for the Log Router can currently only be @@ -3765,10 +3913,14 @@ async def sample_update_settings(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [settings, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3784,14 +3936,14 @@ async def sample_update_settings(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_settings] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_settings + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3808,13 +3960,14 @@ async def sample_update_settings(): # Done; return the response. 
return response - async def _copy_log_entries(self, - request: Optional[Union[logging_config.CopyLogEntriesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def _copy_log_entries( + self, + request: Optional[Union[logging_config.CopyLogEntriesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Copies a set of log entries from a log bucket to a Cloud Storage bucket. @@ -3878,7 +4031,9 @@ async def sample_copy_log_entries(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.copy_log_entries] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.copy_log_entries + ] # Validate the universe domain. self._client._validate_universe_domain() @@ -3944,8 +4099,7 @@ async def list_operations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -3953,7 +4107,11 @@ async def list_operations( # Send the request. response = await rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -4000,8 +4158,7 @@ async def get_operation( # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -4009,7 +4166,11 @@ async def get_operation( # Send the request. response = await rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -4059,15 +4220,19 @@ async def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - await rpc(request_pb, retry=retry, timeout=timeout, metadata=metadata,) + await rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) async def __aenter__(self) -> "BaseConfigServiceV2AsyncClient": return self @@ -4075,12 +4240,13 @@ async def __aenter__(self) -> "BaseConfigServiceV2AsyncClient": async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "BaseConfigServiceV2AsyncClient", -) +__all__ = ("BaseConfigServiceV2AsyncClient",) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py index a597989ec546..d424dcd38eb9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py @@ -13,27 +13,38 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from collections import OrderedDict -from http import HTTPStatus import json import logging as std_logging import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast import warnings +from collections import OrderedDict +from http import HTTPStatus +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) -from google.cloud.logging_v2 import gapic_version as package_version - +import google.protobuf from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.logging_v2 
import gapic_version as package_version +from google.oauth2 import service_account # type: ignore try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -42,21 +53,23 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) -from google.cloud.logging_v2.services.config_service_v2 import pagers -from google.cloud.logging_v2.types import logging_config -from google.longrunning import operations_pb2 # type: ignore import google.api_core.operation as operation # type: ignore import google.api_core.operation_async as operation_async # type: ignore import google.protobuf.empty_pb2 as empty_pb2 # type: ignore import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore -from .transports.base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO +from google.cloud.logging_v2.services.config_service_v2 import pagers +from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore + +from .transports.base import DEFAULT_CLIENT_INFO, ConfigServiceV2Transport from .transports.grpc import ConfigServiceV2GrpcTransport from .transports.grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport @@ -68,13 +81,15 @@ class BaseConfigServiceV2ClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. 
""" + _transport_registry = OrderedDict() # type: Dict[str, Type[ConfigServiceV2Transport]] _transport_registry["grpc"] = ConfigServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = ConfigServiceV2GrpcAsyncIOTransport - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[ConfigServiceV2Transport]: + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[ConfigServiceV2Transport]: """Returns an appropriate transport class. Args: @@ -150,14 +165,16 @@ def _use_client_cert_effective(): bool: whether client certificate should be used for mTLS Raises: ValueError: (If using a version of google-auth without should_use_client_cert and - GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) """ # check if google-auth version supports should_use_client_cert for automatic mTLS enablement if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER return mtls.should_use_client_cert() - else: # pragma: NO COVER + else: # pragma: NO COVER # if unsupported, fallback to reading from env var - use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert_str = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() if use_client_cert_str not in ("true", "false"): raise ValueError( "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" @@ -196,8 +213,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: BaseConfigServiceV2Client: The constructed client. 
""" - credentials = service_account.Credentials.from_service_account_file( - filename) + credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials return cls(*args, **kwargs) @@ -214,139 +230,220 @@ def transport(self) -> ConfigServiceV2Transport: return self._transport @staticmethod - def cmek_settings_path(project: str,) -> str: + def cmek_settings_path( + project: str, + ) -> str: """Returns a fully-qualified cmek_settings string.""" - return "projects/{project}/cmekSettings".format(project=project, ) + return "projects/{project}/cmekSettings".format( + project=project, + ) @staticmethod - def parse_cmek_settings_path(path: str) -> Dict[str,str]: + def parse_cmek_settings_path(path: str) -> Dict[str, str]: """Parses a cmek_settings path into its component segments.""" m = re.match(r"^projects/(?P.+?)/cmekSettings$", path) return m.groupdict() if m else {} @staticmethod - def link_path(project: str,location: str,bucket: str,link: str,) -> str: + def link_path( + project: str, + location: str, + bucket: str, + link: str, + ) -> str: """Returns a fully-qualified link string.""" - return "projects/{project}/locations/{location}/buckets/{bucket}/links/{link}".format(project=project, location=location, bucket=bucket, link=link, ) + return "projects/{project}/locations/{location}/buckets/{bucket}/links/{link}".format( + project=project, + location=location, + bucket=bucket, + link=link, + ) @staticmethod - def parse_link_path(path: str) -> Dict[str,str]: + def parse_link_path(path: str) -> Dict[str, str]: """Parses a link path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)/links/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)/links/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def log_bucket_path(project: str,location: str,bucket: str,) -> str: + def log_bucket_path( + project: str, + location: 
str, + bucket: str, + ) -> str: """Returns a fully-qualified log_bucket string.""" - return "projects/{project}/locations/{location}/buckets/{bucket}".format(project=project, location=location, bucket=bucket, ) + return "projects/{project}/locations/{location}/buckets/{bucket}".format( + project=project, + location=location, + bucket=bucket, + ) @staticmethod - def parse_log_bucket_path(path: str) -> Dict[str,str]: + def parse_log_bucket_path(path: str) -> Dict[str, str]: """Parses a log_bucket path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def log_exclusion_path(project: str,exclusion: str,) -> str: + def log_exclusion_path( + project: str, + exclusion: str, + ) -> str: """Returns a fully-qualified log_exclusion string.""" - return "projects/{project}/exclusions/{exclusion}".format(project=project, exclusion=exclusion, ) + return "projects/{project}/exclusions/{exclusion}".format( + project=project, + exclusion=exclusion, + ) @staticmethod - def parse_log_exclusion_path(path: str) -> Dict[str,str]: + def parse_log_exclusion_path(path: str) -> Dict[str, str]: """Parses a log_exclusion path into its component segments.""" m = re.match(r"^projects/(?P.+?)/exclusions/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def log_sink_path(project: str,sink: str,) -> str: + def log_sink_path( + project: str, + sink: str, + ) -> str: """Returns a fully-qualified log_sink string.""" - return "projects/{project}/sinks/{sink}".format(project=project, sink=sink, ) + return "projects/{project}/sinks/{sink}".format( + project=project, + sink=sink, + ) @staticmethod - def parse_log_sink_path(path: str) -> Dict[str,str]: + def parse_log_sink_path(path: str) -> Dict[str, str]: """Parses a log_sink path into its component segments.""" m = 
re.match(r"^projects/(?P.+?)/sinks/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def log_view_path(project: str,location: str,bucket: str,view: str,) -> str: + def log_view_path( + project: str, + location: str, + bucket: str, + view: str, + ) -> str: """Returns a fully-qualified log_view string.""" - return "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format(project=project, location=location, bucket=bucket, view=view, ) + return "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format( + project=project, + location=location, + bucket=bucket, + view=view, + ) @staticmethod - def parse_log_view_path(path: str) -> Dict[str,str]: + def parse_log_view_path(path: str) -> Dict[str, str]: """Parses a log_view path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)/views/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)/views/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def settings_path(project: str,) -> str: + def settings_path( + project: str, + ) -> str: """Returns a fully-qualified settings string.""" - return "projects/{project}/settings".format(project=project, ) + return "projects/{project}/settings".format( + project=project, + ) @staticmethod - def parse_settings_path(path: str) -> Dict[str,str]: + def parse_settings_path(path: str) -> Dict[str, str]: """Parses a settings path into its component segments.""" m = re.match(r"^projects/(?P.+?)/settings$", path) return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) 
@staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: + def parse_common_billing_account_path(path: str) -> Dict[str, str]: """Parse a billing_account path into its component segments.""" m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str, ) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: + def parse_common_folder_path(path: str) -> Dict[str, str]: """Parse a folder path into its component segments.""" m = re.match(r"^folders/(?P<folder>.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str, ) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: + def parse_common_organization_path(path: str) -> Dict[str, str]: """Parse a organization path into its component segments.""" m = re.match(r"^organizations/(?P<organization>.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str, ) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) + return "projects/{project}".format( + project=project, + ) @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: + def parse_common_project_path(path: str) -> Dict[str, str]: """Parse a project path into its component segments.""" m = re.match(r"^projects/(?P<project>.+?)$", path) return 
m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str, ) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: + def parse_common_location_path(path: str) -> Dict[str, str]: """Parse a location path into its component segments.""" m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) return m.groupdict() if m else {} @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -378,14 +475,18 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = BaseConfigServiceV2Client._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Figure out the client cert source to use. client_cert_source = None @@ -398,7 +499,9 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio # Figure out which api endpoint to use. if client_options.api_endpoint is not None: api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): api_endpoint = cls.DEFAULT_MTLS_ENDPOINT else: api_endpoint = cls.DEFAULT_ENDPOINT @@ -423,7 +526,9 @@ def _read_environment_variables(): use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) return use_client_cert, use_mtls_endpoint, universe_domain_env @staticmethod @@ -446,7 +551,9 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): return client_cert_source @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint) -> str: + def 
_get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ) -> str: """Return the API endpoint used by the client. Args: @@ -462,17 +569,25 @@ def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtl """ if api_override is not None: api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): _default_universe = BaseConfigServiceV2Client._DEFAULT_UNIVERSE if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) api_endpoint = BaseConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = BaseConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + api_endpoint = BaseConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) return api_endpoint @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: """Return the universe domain used by the client. Args: @@ -508,15 +623,18 @@ def _validate_universe_domain(self): return True def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError + self, error: core_exceptions.GoogleAPICallError ) -> None: """Adds credential info string to error details for 401/403/404 errors. Args: error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
""" - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: return cred = self._transport._credentials @@ -549,12 +667,18 @@ def universe_domain(self) -> str: """ return self._universe_domain - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ConfigServiceV2Transport, Callable[..., ConfigServiceV2Transport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, ConfigServiceV2Transport, Callable[..., ConfigServiceV2Transport] + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the base config service v2 client. 
Args: @@ -609,13 +733,21 @@ def __init__(self, *, self._client_options = client_options_lib.from_dict(self._client_options) if self._client_options is None: self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = BaseConfigServiceV2Client._read_environment_variables() - self._client_cert_source = BaseConfigServiceV2Client._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = BaseConfigServiceV2Client._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ( + BaseConfigServiceV2Client._read_environment_variables() + ) + self._client_cert_source = BaseConfigServiceV2Client._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = BaseConfigServiceV2Client._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) self._api_endpoint: str = "" # updated below, depending on `transport` # Initialize the universe domain validation. @@ -627,7 +759,9 @@ def __init__(self, *, api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. 
# Ordinarily, we provide the transport, but allowing a custom transport @@ -636,30 +770,40 @@ def __init__(self, *, if transport_provided: # transport is a ConfigServiceV2Transport instance. if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) if self._client_options.scopes: raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." + "When providing a transport instance, provide its scopes directly." ) self._transport = cast(ConfigServiceV2Transport, transport) self._api_endpoint = self._transport.host - self._api_endpoint = (self._api_endpoint or - BaseConfigServiceV2Client._get_api_endpoint( + self._api_endpoint = ( + self._api_endpoint + or BaseConfigServiceV2Client._get_api_endpoint( self._client_options.api_endpoint, self._client_cert_source, self._universe_domain, - self._use_mtls_endpoint)) + self._use_mtls_endpoint, + ) + ) if not transport_provided: import google.auth._default # type: ignore - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) - transport_init: Union[Type[ConfigServiceV2Transport], Callable[..., ConfigServiceV2Transport]] = ( + transport_init: Union[ + Type[ConfigServiceV2Transport], Callable[..., ConfigServiceV2Transport] + ] = ( BaseConfigServiceV2Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConfigServiceV2Transport], transport) @@ -678,28 +822,37 @@ def __init__(self, *, ) if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER _LOGGER.debug( "Created client `google.logging_v2.BaseConfigServiceV2Client`.", - extra = { + extra={ "serviceName": "google.logging.v2.ConfigServiceV2", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { "serviceName": "google.logging.v2.ConfigServiceV2", "credentialsType": None, - } + }, ) - def list_buckets(self, - request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListBucketsPager: + def list_buckets( + self, + request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListBucketsPager: r"""Lists log buckets. .. code-block:: python @@ -771,10 +924,14 @@ def sample_list_buckets(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -792,9 +949,7 @@ def sample_list_buckets(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -822,13 +977,14 @@ def sample_list_buckets(): # Done; return the response. return response - def get_bucket(self, - request: Optional[Union[logging_config.GetBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogBucket: + def get_bucket( + self, + request: Optional[Union[logging_config.GetBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogBucket: r"""Gets a log bucket. .. code-block:: python @@ -887,9 +1043,7 @@ def sample_get_bucket(): # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -906,13 +1060,14 @@ def sample_get_bucket(): # Done; return the response. return response - def create_bucket_async(self, - request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def create_bucket_async( + self, + request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Creates a log bucket asynchronously that can be used to store log entries. After a bucket has been created, the bucket's location @@ -982,9 +1137,7 @@ def sample_create_bucket_async(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1009,13 +1162,14 @@ def sample_create_bucket_async(): # Done; return the response. 
return response - def update_bucket_async(self, - request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def update_bucket_async( + self, + request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Updates a log bucket asynchronously. If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, @@ -1087,9 +1241,7 @@ def sample_update_bucket_async(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1114,13 +1266,14 @@ def sample_update_bucket_async(): # Done; return the response. return response - def create_bucket(self, - request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogBucket: + def create_bucket( + self, + request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogBucket: r"""Creates a log bucket that can be used to store log entries. After a bucket has been created, the bucket's location cannot be changed. 
@@ -1182,9 +1335,7 @@ def sample_create_bucket(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1201,13 +1352,14 @@ def sample_create_bucket(): # Done; return the response. return response - def update_bucket(self, - request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogBucket: + def update_bucket( + self, + request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogBucket: r"""Updates a log bucket. If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, @@ -1272,9 +1424,7 @@ def sample_update_bucket(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1291,13 +1441,14 @@ def sample_update_bucket(): # Done; return the response. 
return response - def delete_bucket(self, - request: Optional[Union[logging_config.DeleteBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def delete_bucket( + self, + request: Optional[Union[logging_config.DeleteBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a log bucket. Changes the bucket's ``lifecycle_state`` to the @@ -1352,9 +1503,7 @@ def sample_delete_bucket(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1368,13 +1517,14 @@ def sample_delete_bucket(): metadata=metadata, ) - def undelete_bucket(self, - request: Optional[Union[logging_config.UndeleteBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def undelete_bucket( + self, + request: Optional[Union[logging_config.UndeleteBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Undeletes a log bucket. A bucket that has been deleted can be undeleted within the grace period of 7 days. @@ -1426,9 +1576,7 @@ def sample_undelete_bucket(): # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1442,14 +1590,15 @@ def sample_undelete_bucket(): metadata=metadata, ) - def _list_views(self, - request: Optional[Union[logging_config.ListViewsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListViewsPager: + def _list_views( + self, + request: Optional[Union[logging_config.ListViewsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListViewsPager: r"""Lists views on a log bucket. .. code-block:: python @@ -1513,10 +1662,14 @@ def sample_list_views(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1534,9 +1687,7 @@ def sample_list_views(): # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1564,13 +1715,14 @@ def sample_list_views(): # Done; return the response. return response - def _get_view(self, - request: Optional[Union[logging_config.GetViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogView: + def _get_view( + self, + request: Optional[Union[logging_config.GetViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogView: r"""Gets a view on a log bucket.. .. code-block:: python @@ -1629,9 +1781,7 @@ def sample_get_view(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1648,13 +1798,14 @@ def sample_get_view(): # Done; return the response. 
return response - def _create_view(self, - request: Optional[Union[logging_config.CreateViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogView: + def _create_view( + self, + request: Optional[Union[logging_config.CreateViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogView: r"""Creates a view over log entries in a log bucket. A bucket may contain a maximum of 30 views. @@ -1715,9 +1866,7 @@ def sample_create_view(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1734,13 +1883,14 @@ def sample_create_view(): # Done; return the response. return response - def _update_view(self, - request: Optional[Union[logging_config.UpdateViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogView: + def _update_view( + self, + request: Optional[Union[logging_config.UpdateViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogView: r"""Updates a view on a log bucket. This method replaces the following fields in the existing view with values from the new view: ``filter``. 
If an ``UNAVAILABLE`` error is returned, this @@ -1803,9 +1953,7 @@ def sample_update_view(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1822,13 +1970,14 @@ def sample_update_view(): # Done; return the response. return response - def _delete_view(self, - request: Optional[Union[logging_config.DeleteViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def _delete_view( + self, + request: Optional[Union[logging_config.DeleteViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is returned, this indicates that system is not in a state where it can delete the view. If this occurs, please try again in a few @@ -1881,9 +2030,7 @@ def sample_delete_view(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. 
@@ -1897,14 +2044,15 @@ def sample_delete_view(): metadata=metadata, ) - def _list_sinks(self, - request: Optional[Union[logging_config.ListSinksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListSinksPager: + def _list_sinks( + self, + request: Optional[Union[logging_config.ListSinksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSinksPager: r"""Lists sinks. .. code-block:: python @@ -1971,10 +2119,14 @@ def sample_list_sinks(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1992,9 +2144,7 @@ def sample_list_sinks(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. 
@@ -2022,14 +2172,15 @@ def sample_list_sinks(): # Done; return the response. return response - def _get_sink(self, - request: Optional[Union[logging_config.GetSinkRequest, dict]] = None, - *, - sink_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogSink: + def _get_sink( + self, + request: Optional[Union[logging_config.GetSinkRequest, dict]] = None, + *, + sink_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogSink: r"""Gets a sink. .. code-block:: python @@ -2103,10 +2254,14 @@ def sample_get_sink(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [sink_name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2124,9 +2279,9 @@ def sample_get_sink(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("sink_name", request.sink_name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("sink_name", request.sink_name),) + ), ) # Validate the universe domain. 
@@ -2143,15 +2298,16 @@ def sample_get_sink(): # Done; return the response. return response - def _create_sink(self, - request: Optional[Union[logging_config.CreateSinkRequest, dict]] = None, - *, - parent: Optional[str] = None, - sink: Optional[logging_config.LogSink] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogSink: + def _create_sink( + self, + request: Optional[Union[logging_config.CreateSinkRequest, dict]] = None, + *, + parent: Optional[str] = None, + sink: Optional[logging_config.LogSink] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogSink: r"""Creates a sink that exports specified log entries to a destination. The export of newly-ingested log entries begins immediately, unless the sink's ``writer_identity`` is not @@ -2241,10 +2397,14 @@ def sample_create_sink(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent, sink] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2264,9 +2424,7 @@ def sample_create_sink(): # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2283,16 +2441,17 @@ def sample_create_sink(): # Done; return the response. return response - def _update_sink(self, - request: Optional[Union[logging_config.UpdateSinkRequest, dict]] = None, - *, - sink_name: Optional[str] = None, - sink: Optional[logging_config.LogSink] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogSink: + def _update_sink( + self, + request: Optional[Union[logging_config.UpdateSinkRequest, dict]] = None, + *, + sink_name: Optional[str] = None, + sink: Optional[logging_config.LogSink] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogSink: r"""Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: ``destination``, and ``filter``. @@ -2406,10 +2565,14 @@ def sample_update_sink(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [sink_name, sink, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2431,9 +2594,9 @@ def sample_update_sink(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("sink_name", request.sink_name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("sink_name", request.sink_name),) + ), ) # Validate the universe domain. @@ -2450,14 +2613,15 @@ def sample_update_sink(): # Done; return the response. return response - def _delete_sink(self, - request: Optional[Union[logging_config.DeleteSinkRequest, dict]] = None, - *, - sink_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def _delete_sink( + self, + request: Optional[Union[logging_config.DeleteSinkRequest, dict]] = None, + *, + sink_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a sink. If the sink has a unique ``writer_identity``, then that service account is also deleted. 
@@ -2517,10 +2681,14 @@ def sample_delete_sink(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [sink_name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2538,9 +2706,9 @@ def sample_delete_sink(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("sink_name", request.sink_name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("sink_name", request.sink_name),) + ), ) # Validate the universe domain. 
@@ -2554,16 +2722,17 @@ def sample_delete_sink(): metadata=metadata, ) - def _create_link(self, - request: Optional[Union[logging_config.CreateLinkRequest, dict]] = None, - *, - parent: Optional[str] = None, - link: Optional[logging_config.Link] = None, - link_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def _create_link( + self, + request: Optional[Union[logging_config.CreateLinkRequest, dict]] = None, + *, + parent: Optional[str] = None, + link: Optional[logging_config.Link] = None, + link_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Asynchronously creates a linked dataset in BigQuery which makes it possible to use BigQuery to read the logs stored in the log bucket. A log bucket may currently @@ -2651,10 +2820,14 @@ def sample_create_link(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent, link, link_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -2676,9 +2849,7 @@ def sample_create_link(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2703,14 +2874,15 @@ def sample_create_link(): # Done; return the response. return response - def _delete_link(self, - request: Optional[Union[logging_config.DeleteLinkRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def _delete_link( + self, + request: Optional[Union[logging_config.DeleteLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Deletes a link. This will also delete the corresponding BigQuery linked dataset. @@ -2786,10 +2958,14 @@ def sample_delete_link(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2807,9 +2983,7 @@ def sample_delete_link(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2834,14 +3008,15 @@ def sample_delete_link(): # Done; return the response. return response - def _list_links(self, - request: Optional[Union[logging_config.ListLinksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLinksPager: + def _list_links( + self, + request: Optional[Union[logging_config.ListLinksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLinksPager: r"""Lists links. .. code-block:: python @@ -2907,10 +3082,14 @@ def sample_list_links(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2928,9 +3107,7 @@ def sample_list_links(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -2958,14 +3135,15 @@ def sample_list_links(): # Done; return the response. return response - def _get_link(self, - request: Optional[Union[logging_config.GetLinkRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.Link: + def _get_link( + self, + request: Optional[Union[logging_config.GetLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Link: r"""Gets a link. .. code-block:: python @@ -3026,10 +3204,14 @@ def sample_get_link(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3047,9 +3229,7 @@ def sample_get_link(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3066,14 +3246,15 @@ def sample_get_link(): # Done; return the response. return response - def _list_exclusions(self, - request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListExclusionsPager: + def _list_exclusions( + self, + request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListExclusionsPager: r"""Lists all the exclusions on the \_Default sink in a parent resource. @@ -3141,10 +3322,14 @@ def sample_list_exclusions(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3162,9 +3347,7 @@ def sample_list_exclusions(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -3192,14 +3375,15 @@ def sample_list_exclusions(): # Done; return the response. return response - def _get_exclusion(self, - request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogExclusion: + def _get_exclusion( + self, + request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogExclusion: r"""Gets the description of an exclusion in the \_Default sink. .. 
code-block:: python @@ -3271,10 +3455,14 @@ def sample_get_exclusion(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3292,9 +3480,7 @@ def sample_get_exclusion(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3311,15 +3497,16 @@ def sample_get_exclusion(): # Done; return the response. 
return response - def _create_exclusion(self, - request: Optional[Union[logging_config.CreateExclusionRequest, dict]] = None, - *, - parent: Optional[str] = None, - exclusion: Optional[logging_config.LogExclusion] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogExclusion: + def _create_exclusion( + self, + request: Optional[Union[logging_config.CreateExclusionRequest, dict]] = None, + *, + parent: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogExclusion: r"""Creates a new exclusion in the \_Default sink in a specified parent resource. Only log entries belonging to that resource can be excluded. You can have up to 10 exclusions in a resource. @@ -3408,10 +3595,14 @@ def sample_create_exclusion(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent, exclusion] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3431,9 +3622,7 @@ def sample_create_exclusion(): # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -3450,16 +3639,17 @@ def sample_create_exclusion(): # Done; return the response. return response - def _update_exclusion(self, - request: Optional[Union[logging_config.UpdateExclusionRequest, dict]] = None, - *, - name: Optional[str] = None, - exclusion: Optional[logging_config.LogExclusion] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogExclusion: + def _update_exclusion( + self, + request: Optional[Union[logging_config.UpdateExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogExclusion: r"""Changes one or more properties of an existing exclusion in the \_Default sink. @@ -3559,10 +3749,14 @@ def sample_update_exclusion(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [name, exclusion, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3584,9 +3778,7 @@ def sample_update_exclusion(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3603,14 +3795,15 @@ def sample_update_exclusion(): # Done; return the response. return response - def _delete_exclusion(self, - request: Optional[Union[logging_config.DeleteExclusionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def _delete_exclusion( + self, + request: Optional[Union[logging_config.DeleteExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes an exclusion in the \_Default sink. .. 
code-block:: python @@ -3669,10 +3862,14 @@ def sample_delete_exclusion(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3690,9 +3887,7 @@ def sample_delete_exclusion(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3706,13 +3901,14 @@ def sample_delete_exclusion(): metadata=metadata, ) - def _get_cmek_settings(self, - request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.CmekSettings: + def _get_cmek_settings( + self, + request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.CmekSettings: r"""Gets the Logging CMEK settings for the given resource. 
Note: CMEK for the Log Router can be configured for Google Cloud @@ -3795,9 +3991,7 @@ def sample_get_cmek_settings(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3814,13 +4008,14 @@ def sample_get_cmek_settings(): # Done; return the response. return response - def _update_cmek_settings(self, - request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.CmekSettings: + def _update_cmek_settings( + self, + request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.CmekSettings: r"""Updates the Log Router CMEK settings for the given resource. Note: CMEK for the Log Router can currently only be configured @@ -3908,9 +4103,7 @@ def sample_update_cmek_settings(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3927,14 +4120,15 @@ def sample_update_cmek_settings(): # Done; return the response. 
return response - def _get_settings(self, - request: Optional[Union[logging_config.GetSettingsRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.Settings: + def _get_settings( + self, + request: Optional[Union[logging_config.GetSettingsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Settings: r"""Gets the Log Router settings for the given resource. Note: Settings for the Log Router can be get for Google Cloud @@ -4024,10 +4218,14 @@ def sample_get_settings(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -4045,9 +4243,7 @@ def sample_get_settings(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. 
@@ -4064,15 +4260,16 @@ def sample_get_settings(): # Done; return the response. return response - def _update_settings(self, - request: Optional[Union[logging_config.UpdateSettingsRequest, dict]] = None, - *, - settings: Optional[logging_config.Settings] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.Settings: + def _update_settings( + self, + request: Optional[Union[logging_config.UpdateSettingsRequest, dict]] = None, + *, + settings: Optional[logging_config.Settings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Settings: r"""Updates the Log Router settings for the given resource. Note: Settings for the Log Router can currently only be @@ -4169,10 +4366,14 @@ def sample_update_settings(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [settings, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -4192,9 +4393,7 @@ def sample_update_settings(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -4211,13 +4410,14 @@ def sample_update_settings(): # Done; return the response. return response - def _copy_log_entries(self, - request: Optional[Union[logging_config.CopyLogEntriesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def _copy_log_entries( + self, + request: Optional[Union[logging_config.CopyLogEntriesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Copies a set of log entries from a log bucket to a Cloud Storage bucket. @@ -4360,8 +4560,7 @@ def list_operations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -4370,7 +4569,11 @@ def list_operations( try: # Send the request. response = rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -4420,8 +4623,7 @@ def get_operation( # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -4430,7 +4632,11 @@ def get_operation( try: # Send the request. response = rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -4483,27 +4689,26 @@ def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc(request_pb, retry=retry, timeout=timeout, metadata=metadata,) - - - - - + rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "BaseConfigServiceV2Client", -) +__all__ = ("BaseConfigServiceV2Client",) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/pagers.py index 1af6b54c9924..2083d0423914 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -13,13 +13,27 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + from google.api_core import gapic_v1 from google.api_core import retry as retries from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore @@ -44,14 +58,17 @@ class ListBucketsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., logging_config.ListBucketsResponse], - request: logging_config.ListBucketsRequest, - response: logging_config.ListBucketsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging_config.ListBucketsResponse], + request: logging_config.ListBucketsRequest, + response: logging_config.ListBucketsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. Args: @@ -84,7 +101,12 @@ def pages(self) -> Iterator[logging_config.ListBucketsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[logging_config.LogBucket]: @@ -92,7 +114,7 @@ def __iter__(self) -> Iterator[logging_config.LogBucket]: yield from page.buckets def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListBucketsAsyncPager: @@ -112,14 +134,17 @@ class ListBucketsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[logging_config.ListBucketsResponse]], - request: logging_config.ListBucketsRequest, - response: logging_config.ListBucketsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging_config.ListBucketsResponse]], + request: logging_config.ListBucketsRequest, + response: logging_config.ListBucketsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. Args: @@ -152,8 +177,14 @@ async def pages(self) -> AsyncIterator[logging_config.ListBucketsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[logging_config.LogBucket]: async def async_generator(): async for page in self.pages: @@ -163,7 +194,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListViewsPager: @@ -183,14 +214,17 @@ class ListViewsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., logging_config.ListViewsResponse], - request: logging_config.ListViewsRequest, - response: logging_config.ListViewsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging_config.ListViewsResponse], + request: logging_config.ListViewsRequest, + response: logging_config.ListViewsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. Args: @@ -223,7 +257,12 @@ def pages(self) -> Iterator[logging_config.ListViewsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[logging_config.LogView]: @@ -231,7 +270,7 @@ def __iter__(self) -> Iterator[logging_config.LogView]: yield from page.views def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListViewsAsyncPager: @@ -251,14 +290,17 @@ class ListViewsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[logging_config.ListViewsResponse]], - request: logging_config.ListViewsRequest, - response: logging_config.ListViewsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging_config.ListViewsResponse]], + request: logging_config.ListViewsRequest, + response: logging_config.ListViewsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. Args: @@ -291,8 +333,14 @@ async def pages(self) -> AsyncIterator[logging_config.ListViewsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[logging_config.LogView]: async def async_generator(): async for page in self.pages: @@ -302,7 +350,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListSinksPager: @@ -322,14 +370,17 @@ class ListSinksPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., logging_config.ListSinksResponse], - request: logging_config.ListSinksRequest, - response: logging_config.ListSinksResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging_config.ListSinksResponse], + request: logging_config.ListSinksRequest, + response: logging_config.ListSinksResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. Args: @@ -362,7 +413,12 @@ def pages(self) -> Iterator[logging_config.ListSinksResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[logging_config.LogSink]: @@ -370,7 +426,7 @@ def __iter__(self) -> Iterator[logging_config.LogSink]: yield from page.sinks def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListSinksAsyncPager: @@ -390,14 +446,17 @@ class ListSinksAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[logging_config.ListSinksResponse]], - request: logging_config.ListSinksRequest, - response: logging_config.ListSinksResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging_config.ListSinksResponse]], + request: logging_config.ListSinksRequest, + response: logging_config.ListSinksResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. Args: @@ -430,8 +489,14 @@ async def pages(self) -> AsyncIterator[logging_config.ListSinksResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[logging_config.LogSink]: async def async_generator(): async for page in self.pages: @@ -441,7 +506,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListLinksPager: @@ -461,14 +526,17 @@ class ListLinksPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., logging_config.ListLinksResponse], - request: logging_config.ListLinksRequest, - response: logging_config.ListLinksResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging_config.ListLinksResponse], + request: logging_config.ListLinksRequest, + response: logging_config.ListLinksResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. Args: @@ -501,7 +569,12 @@ def pages(self) -> Iterator[logging_config.ListLinksResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[logging_config.Link]: @@ -509,7 +582,7 @@ def __iter__(self) -> Iterator[logging_config.Link]: yield from page.links def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListLinksAsyncPager: @@ -529,14 +602,17 @@ class ListLinksAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[logging_config.ListLinksResponse]], - request: logging_config.ListLinksRequest, - response: logging_config.ListLinksResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging_config.ListLinksResponse]], + request: logging_config.ListLinksRequest, + response: logging_config.ListLinksResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. Args: @@ -569,8 +645,14 @@ async def pages(self) -> AsyncIterator[logging_config.ListLinksResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[logging_config.Link]: async def async_generator(): async for page in self.pages: @@ -580,7 +662,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListExclusionsPager: @@ -600,14 +682,17 @@ class ListExclusionsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., logging_config.ListExclusionsResponse], - request: logging_config.ListExclusionsRequest, - response: logging_config.ListExclusionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging_config.ListExclusionsResponse], + request: logging_config.ListExclusionsRequest, + response: logging_config.ListExclusionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. Args: @@ -640,7 +725,12 @@ def pages(self) -> Iterator[logging_config.ListExclusionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[logging_config.LogExclusion]: @@ -648,7 +738,7 @@ def __iter__(self) -> Iterator[logging_config.LogExclusion]: yield from page.exclusions def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListExclusionsAsyncPager: @@ -668,14 +758,17 @@ class ListExclusionsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[logging_config.ListExclusionsResponse]], - request: logging_config.ListExclusionsRequest, - response: logging_config.ListExclusionsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging_config.ListExclusionsResponse]], + request: logging_config.ListExclusionsRequest, + response: logging_config.ListExclusionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. Args: @@ -708,8 +801,14 @@ async def pages(self) -> AsyncIterator[logging_config.ListExclusionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[logging_config.LogExclusion]: async def async_generator(): async for page in self.pages: @@ -719,4 +818,4 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py index cc3da21c119f..790c53bfdcdd 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py @@ -20,14 +20,13 @@ from .grpc import ConfigServiceV2GrpcTransport from .grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport - # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[ConfigServiceV2Transport]] -_transport_registry['grpc'] = ConfigServiceV2GrpcTransport -_transport_registry['grpc_asyncio'] = ConfigServiceV2GrpcAsyncIOTransport +_transport_registry["grpc"] = ConfigServiceV2GrpcTransport +_transport_registry["grpc_asyncio"] = ConfigServiceV2GrpcAsyncIOTransport __all__ = ( - 'ConfigServiceV2Transport', - 'ConfigServiceV2GrpcTransport', - 'ConfigServiceV2GrpcAsyncIOTransport', + "ConfigServiceV2Transport", + "ConfigServiceV2GrpcTransport", + "ConfigServiceV2GrpcAsyncIOTransport", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py index b3f67b0461bc..b8222d13fc5f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -16,23 +16,22 @@ import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -from google.cloud.logging_v2 import gapic_version as package_version - -import google.auth # type: ignore import google.api_core +import google.auth # type: ignore +import google.protobuf +import google.protobuf.empty_pb2 as empty_pb2 # 
type: ignore from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, operations_v1 from google.api_core import retry as retries -from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - +from google.cloud.logging_v2 import gapic_version as package_version from google.cloud.logging_v2.types import logging_config -from google.longrunning import operations_pb2 # type: ignore -import google.protobuf.empty_pb2 as empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ @@ -42,26 +41,27 @@ class ConfigServiceV2Transport(abc.ABC): """Abstract transport class for ConfigServiceV2.""" AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", ) - DEFAULT_HOST: str = 'logging.googleapis.com' + DEFAULT_HOST: str = "logging.googleapis.com" def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - 
quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: """Instantiate the transport. Args: @@ -100,31 +100,43 @@ def __init__( # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) if credentials_file is not None: credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - default_scopes=self.AUTH_SCOPES, - ) + credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(scopes=scopes, quota_project_id=quota_project_id, default_scopes=self.AUTH_SCOPES) + credentials, _ = google.auth.default( + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) # Don't apply audience if the credentials file passed from user. 
if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): credentials = credentials.with_always_use_jwt_access(True) # Save the credentials. self._credentials = credentials # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' + if ":" not in host: + host += ":443" self._host = host self._wrapped_methods: Dict[Callable, Callable] = {} @@ -388,14 +400,14 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - } + } def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
""" raise NotImplementedError() @@ -405,291 +417,306 @@ def operations_client(self): raise NotImplementedError() @property - def list_buckets(self) -> Callable[ - [logging_config.ListBucketsRequest], - Union[ - logging_config.ListBucketsResponse, - Awaitable[logging_config.ListBucketsResponse] - ]]: + def list_buckets( + self, + ) -> Callable[ + [logging_config.ListBucketsRequest], + Union[ + logging_config.ListBucketsResponse, + Awaitable[logging_config.ListBucketsResponse], + ], + ]: raise NotImplementedError() @property - def get_bucket(self) -> Callable[ - [logging_config.GetBucketRequest], - Union[ - logging_config.LogBucket, - Awaitable[logging_config.LogBucket] - ]]: + def get_bucket( + self, + ) -> Callable[ + [logging_config.GetBucketRequest], + Union[logging_config.LogBucket, Awaitable[logging_config.LogBucket]], + ]: raise NotImplementedError() @property - def create_bucket_async(self) -> Callable[ - [logging_config.CreateBucketRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def create_bucket_async( + self, + ) -> Callable[ + [logging_config.CreateBucketRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def update_bucket_async(self) -> Callable[ - [logging_config.UpdateBucketRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def update_bucket_async( + self, + ) -> Callable[ + [logging_config.UpdateBucketRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def create_bucket(self) -> Callable[ - [logging_config.CreateBucketRequest], - Union[ - logging_config.LogBucket, - Awaitable[logging_config.LogBucket] - ]]: + def create_bucket( + self, + ) -> Callable[ + [logging_config.CreateBucketRequest], + Union[logging_config.LogBucket, Awaitable[logging_config.LogBucket]], + ]: raise NotImplementedError() @property - def 
update_bucket(self) -> Callable[ - [logging_config.UpdateBucketRequest], - Union[ - logging_config.LogBucket, - Awaitable[logging_config.LogBucket] - ]]: + def update_bucket( + self, + ) -> Callable[ + [logging_config.UpdateBucketRequest], + Union[logging_config.LogBucket, Awaitable[logging_config.LogBucket]], + ]: raise NotImplementedError() @property - def delete_bucket(self) -> Callable[ - [logging_config.DeleteBucketRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def delete_bucket( + self, + ) -> Callable[ + [logging_config.DeleteBucketRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: raise NotImplementedError() @property - def undelete_bucket(self) -> Callable[ - [logging_config.UndeleteBucketRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def undelete_bucket( + self, + ) -> Callable[ + [logging_config.UndeleteBucketRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: raise NotImplementedError() @property - def list_views(self) -> Callable[ - [logging_config.ListViewsRequest], - Union[ - logging_config.ListViewsResponse, - Awaitable[logging_config.ListViewsResponse] - ]]: + def list_views( + self, + ) -> Callable[ + [logging_config.ListViewsRequest], + Union[ + logging_config.ListViewsResponse, + Awaitable[logging_config.ListViewsResponse], + ], + ]: raise NotImplementedError() @property - def get_view(self) -> Callable[ - [logging_config.GetViewRequest], - Union[ - logging_config.LogView, - Awaitable[logging_config.LogView] - ]]: + def get_view( + self, + ) -> Callable[ + [logging_config.GetViewRequest], + Union[logging_config.LogView, Awaitable[logging_config.LogView]], + ]: raise NotImplementedError() @property - def create_view(self) -> Callable[ - [logging_config.CreateViewRequest], - Union[ - logging_config.LogView, - Awaitable[logging_config.LogView] - ]]: + def create_view( + self, + ) -> Callable[ + [logging_config.CreateViewRequest], + 
Union[logging_config.LogView, Awaitable[logging_config.LogView]], + ]: raise NotImplementedError() @property - def update_view(self) -> Callable[ - [logging_config.UpdateViewRequest], - Union[ - logging_config.LogView, - Awaitable[logging_config.LogView] - ]]: + def update_view( + self, + ) -> Callable[ + [logging_config.UpdateViewRequest], + Union[logging_config.LogView, Awaitable[logging_config.LogView]], + ]: raise NotImplementedError() @property - def delete_view(self) -> Callable[ - [logging_config.DeleteViewRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def delete_view( + self, + ) -> Callable[ + [logging_config.DeleteViewRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: raise NotImplementedError() @property - def list_sinks(self) -> Callable[ - [logging_config.ListSinksRequest], - Union[ - logging_config.ListSinksResponse, - Awaitable[logging_config.ListSinksResponse] - ]]: + def list_sinks( + self, + ) -> Callable[ + [logging_config.ListSinksRequest], + Union[ + logging_config.ListSinksResponse, + Awaitable[logging_config.ListSinksResponse], + ], + ]: raise NotImplementedError() @property - def get_sink(self) -> Callable[ - [logging_config.GetSinkRequest], - Union[ - logging_config.LogSink, - Awaitable[logging_config.LogSink] - ]]: + def get_sink( + self, + ) -> Callable[ + [logging_config.GetSinkRequest], + Union[logging_config.LogSink, Awaitable[logging_config.LogSink]], + ]: raise NotImplementedError() @property - def create_sink(self) -> Callable[ - [logging_config.CreateSinkRequest], - Union[ - logging_config.LogSink, - Awaitable[logging_config.LogSink] - ]]: + def create_sink( + self, + ) -> Callable[ + [logging_config.CreateSinkRequest], + Union[logging_config.LogSink, Awaitable[logging_config.LogSink]], + ]: raise NotImplementedError() @property - def update_sink(self) -> Callable[ - [logging_config.UpdateSinkRequest], - Union[ - logging_config.LogSink, - Awaitable[logging_config.LogSink] - ]]: + def 
update_sink( + self, + ) -> Callable[ + [logging_config.UpdateSinkRequest], + Union[logging_config.LogSink, Awaitable[logging_config.LogSink]], + ]: raise NotImplementedError() @property - def delete_sink(self) -> Callable[ - [logging_config.DeleteSinkRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def delete_sink( + self, + ) -> Callable[ + [logging_config.DeleteSinkRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: raise NotImplementedError() @property - def create_link(self) -> Callable[ - [logging_config.CreateLinkRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def create_link( + self, + ) -> Callable[ + [logging_config.CreateLinkRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def delete_link(self) -> Callable[ - [logging_config.DeleteLinkRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def delete_link( + self, + ) -> Callable[ + [logging_config.DeleteLinkRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def list_links(self) -> Callable[ - [logging_config.ListLinksRequest], - Union[ - logging_config.ListLinksResponse, - Awaitable[logging_config.ListLinksResponse] - ]]: + def list_links( + self, + ) -> Callable[ + [logging_config.ListLinksRequest], + Union[ + logging_config.ListLinksResponse, + Awaitable[logging_config.ListLinksResponse], + ], + ]: raise NotImplementedError() @property - def get_link(self) -> Callable[ - [logging_config.GetLinkRequest], - Union[ - logging_config.Link, - Awaitable[logging_config.Link] - ]]: + def get_link( + self, + ) -> Callable[ + [logging_config.GetLinkRequest], + Union[logging_config.Link, Awaitable[logging_config.Link]], + ]: raise NotImplementedError() @property - def list_exclusions(self) -> Callable[ - 
[logging_config.ListExclusionsRequest], - Union[ - logging_config.ListExclusionsResponse, - Awaitable[logging_config.ListExclusionsResponse] - ]]: + def list_exclusions( + self, + ) -> Callable[ + [logging_config.ListExclusionsRequest], + Union[ + logging_config.ListExclusionsResponse, + Awaitable[logging_config.ListExclusionsResponse], + ], + ]: raise NotImplementedError() @property - def get_exclusion(self) -> Callable[ - [logging_config.GetExclusionRequest], - Union[ - logging_config.LogExclusion, - Awaitable[logging_config.LogExclusion] - ]]: + def get_exclusion( + self, + ) -> Callable[ + [logging_config.GetExclusionRequest], + Union[logging_config.LogExclusion, Awaitable[logging_config.LogExclusion]], + ]: raise NotImplementedError() @property - def create_exclusion(self) -> Callable[ - [logging_config.CreateExclusionRequest], - Union[ - logging_config.LogExclusion, - Awaitable[logging_config.LogExclusion] - ]]: + def create_exclusion( + self, + ) -> Callable[ + [logging_config.CreateExclusionRequest], + Union[logging_config.LogExclusion, Awaitable[logging_config.LogExclusion]], + ]: raise NotImplementedError() @property - def update_exclusion(self) -> Callable[ - [logging_config.UpdateExclusionRequest], - Union[ - logging_config.LogExclusion, - Awaitable[logging_config.LogExclusion] - ]]: + def update_exclusion( + self, + ) -> Callable[ + [logging_config.UpdateExclusionRequest], + Union[logging_config.LogExclusion, Awaitable[logging_config.LogExclusion]], + ]: raise NotImplementedError() @property - def delete_exclusion(self) -> Callable[ - [logging_config.DeleteExclusionRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def delete_exclusion( + self, + ) -> Callable[ + [logging_config.DeleteExclusionRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: raise NotImplementedError() @property - def get_cmek_settings(self) -> Callable[ - [logging_config.GetCmekSettingsRequest], - Union[ - logging_config.CmekSettings, - 
Awaitable[logging_config.CmekSettings] - ]]: + def get_cmek_settings( + self, + ) -> Callable[ + [logging_config.GetCmekSettingsRequest], + Union[logging_config.CmekSettings, Awaitable[logging_config.CmekSettings]], + ]: raise NotImplementedError() @property - def update_cmek_settings(self) -> Callable[ - [logging_config.UpdateCmekSettingsRequest], - Union[ - logging_config.CmekSettings, - Awaitable[logging_config.CmekSettings] - ]]: + def update_cmek_settings( + self, + ) -> Callable[ + [logging_config.UpdateCmekSettingsRequest], + Union[logging_config.CmekSettings, Awaitable[logging_config.CmekSettings]], + ]: raise NotImplementedError() @property - def get_settings(self) -> Callable[ - [logging_config.GetSettingsRequest], - Union[ - logging_config.Settings, - Awaitable[logging_config.Settings] - ]]: + def get_settings( + self, + ) -> Callable[ + [logging_config.GetSettingsRequest], + Union[logging_config.Settings, Awaitable[logging_config.Settings]], + ]: raise NotImplementedError() @property - def update_settings(self) -> Callable[ - [logging_config.UpdateSettingsRequest], - Union[ - logging_config.Settings, - Awaitable[logging_config.Settings] - ]]: + def update_settings( + self, + ) -> Callable[ + [logging_config.UpdateSettingsRequest], + Union[logging_config.Settings, Awaitable[logging_config.Settings]], + ]: raise NotImplementedError() @property - def copy_log_entries(self) -> Callable[ - [logging_config.CopyLogEntriesRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def copy_log_entries( + self, + ) -> Callable[ + [logging_config.CopyLogEntriesRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property @@ -697,7 +724,10 @@ def list_operations( self, ) -> Callable[ [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + Union[ + operations_pb2.ListOperationsResponse, + 
Awaitable[operations_pb2.ListOperationsResponse], + ], ]: raise NotImplementedError() @@ -724,6 +754,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ( - 'ConfigServiceV2Transport', -) +__all__ = ("ConfigServiceV2Transport",) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 200ae0f81db5..17dbcf73cadc 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -19,25 +19,23 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson +import google.auth # type: ignore +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore import google.protobuf.message - import grpc # type: ignore import proto # type: ignore - +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore from google.cloud.logging_v2.types import logging_config -from google.longrunning import operations_pb2 # type: ignore -import google.protobuf.empty_pb2 as empty_pb2 # type: ignore -from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO +from google.longrunning import 
operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson + +from .base import DEFAULT_CLIENT_INFO, ConfigServiceV2Transport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -47,7 +45,9 @@ class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) if logging_enabled: # pragma: NO COVER request_metadata = client_call_details.metadata if isinstance(request, proto.Message): @@ -68,7 +68,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.logging.v2.ConfigServiceV2", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -79,7 +79,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) result = response.result() if isinstance(result, proto.Message): response_payload = type(result).to_json(result) @@ -94,7 +98,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Received response for {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.logging.v2.ConfigServiceV2", "rpcName": client_call_details.method, "response": grpc_response, @@ -116,23 
+120,26 @@ class ConfigServiceV2GrpcTransport(ConfigServiceV2Transport): It sends protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. """ + _stubs: Dict[str, Callable] - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. 
Args: @@ -260,19 +267,23 @@ def __init__(self, *, ) self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod - def create_channel(cls, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: + def create_channel( + cls, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: """Create and return a gRPC channel object. Args: host (Optional[str]): The host for the channel to use. @@ -308,13 +319,12 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property @@ -334,9 +344,11 @@ def operations_client(self) -> operations_v1.OperationsClient: return self._operations_client @property - def list_buckets(self) -> Callable[ - [logging_config.ListBucketsRequest], - logging_config.ListBucketsResponse]: + def list_buckets( + self, + ) -> Callable[ + [logging_config.ListBucketsRequest], logging_config.ListBucketsResponse + ]: r"""Return a callable for the list buckets method over gRPC. Lists log buckets. @@ -351,18 +363,18 @@ def list_buckets(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_buckets' not in self._stubs: - self._stubs['list_buckets'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListBuckets', + if "list_buckets" not in self._stubs: + self._stubs["list_buckets"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListBuckets", request_serializer=logging_config.ListBucketsRequest.serialize, response_deserializer=logging_config.ListBucketsResponse.deserialize, ) - return self._stubs['list_buckets'] + return self._stubs["list_buckets"] @property - def get_bucket(self) -> Callable[ - [logging_config.GetBucketRequest], - logging_config.LogBucket]: + def get_bucket( + self, + ) -> Callable[[logging_config.GetBucketRequest], logging_config.LogBucket]: r"""Return a callable for the get bucket method over gRPC. Gets a log bucket. @@ -377,18 +389,18 @@ def get_bucket(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_bucket' not in self._stubs: - self._stubs['get_bucket'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetBucket', + if "get_bucket" not in self._stubs: + self._stubs["get_bucket"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetBucket", request_serializer=logging_config.GetBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, ) - return self._stubs['get_bucket'] + return self._stubs["get_bucket"] @property - def create_bucket_async(self) -> Callable[ - [logging_config.CreateBucketRequest], - operations_pb2.Operation]: + def create_bucket_async( + self, + ) -> Callable[[logging_config.CreateBucketRequest], operations_pb2.Operation]: r"""Return a callable for the create bucket async method over gRPC. 
Creates a log bucket asynchronously that can be used @@ -406,18 +418,18 @@ def create_bucket_async(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_bucket_async' not in self._stubs: - self._stubs['create_bucket_async'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateBucketAsync', + if "create_bucket_async" not in self._stubs: + self._stubs["create_bucket_async"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateBucketAsync", request_serializer=logging_config.CreateBucketRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_bucket_async'] + return self._stubs["create_bucket_async"] @property - def update_bucket_async(self) -> Callable[ - [logging_config.UpdateBucketRequest], - operations_pb2.Operation]: + def update_bucket_async( + self, + ) -> Callable[[logging_config.UpdateBucketRequest], operations_pb2.Operation]: r"""Return a callable for the update bucket async method over gRPC. Updates a log bucket asynchronously. @@ -438,18 +450,18 @@ def update_bucket_async(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'update_bucket_async' not in self._stubs: - self._stubs['update_bucket_async'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateBucketAsync', + if "update_bucket_async" not in self._stubs: + self._stubs["update_bucket_async"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateBucketAsync", request_serializer=logging_config.UpdateBucketRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['update_bucket_async'] + return self._stubs["update_bucket_async"] @property - def create_bucket(self) -> Callable[ - [logging_config.CreateBucketRequest], - logging_config.LogBucket]: + def create_bucket( + self, + ) -> Callable[[logging_config.CreateBucketRequest], logging_config.LogBucket]: r"""Return a callable for the create bucket method over gRPC. Creates a log bucket that can be used to store log @@ -466,18 +478,18 @@ def create_bucket(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_bucket' not in self._stubs: - self._stubs['create_bucket'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateBucket', + if "create_bucket" not in self._stubs: + self._stubs["create_bucket"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateBucket", request_serializer=logging_config.CreateBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, ) - return self._stubs['create_bucket'] + return self._stubs["create_bucket"] @property - def update_bucket(self) -> Callable[ - [logging_config.UpdateBucketRequest], - logging_config.LogBucket]: + def update_bucket( + self, + ) -> Callable[[logging_config.UpdateBucketRequest], logging_config.LogBucket]: r"""Return a callable for the update bucket method over gRPC. Updates a log bucket. @@ -498,18 +510,18 @@ def update_bucket(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_bucket' not in self._stubs: - self._stubs['update_bucket'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateBucket', + if "update_bucket" not in self._stubs: + self._stubs["update_bucket"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateBucket", request_serializer=logging_config.UpdateBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, ) - return self._stubs['update_bucket'] + return self._stubs["update_bucket"] @property - def delete_bucket(self) -> Callable[ - [logging_config.DeleteBucketRequest], - empty_pb2.Empty]: + def delete_bucket( + self, + ) -> Callable[[logging_config.DeleteBucketRequest], empty_pb2.Empty]: r"""Return a callable for the delete bucket method over gRPC. Deletes a log bucket. @@ -529,18 +541,18 @@ def delete_bucket(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_bucket' not in self._stubs: - self._stubs['delete_bucket'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteBucket', + if "delete_bucket" not in self._stubs: + self._stubs["delete_bucket"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteBucket", request_serializer=logging_config.DeleteBucketRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_bucket'] + return self._stubs["delete_bucket"] @property - def undelete_bucket(self) -> Callable[ - [logging_config.UndeleteBucketRequest], - empty_pb2.Empty]: + def undelete_bucket( + self, + ) -> Callable[[logging_config.UndeleteBucketRequest], empty_pb2.Empty]: r"""Return a callable for the undelete bucket method over gRPC. Undeletes a log bucket. 
A bucket that has been @@ -557,18 +569,18 @@ def undelete_bucket(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'undelete_bucket' not in self._stubs: - self._stubs['undelete_bucket'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UndeleteBucket', + if "undelete_bucket" not in self._stubs: + self._stubs["undelete_bucket"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UndeleteBucket", request_serializer=logging_config.UndeleteBucketRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['undelete_bucket'] + return self._stubs["undelete_bucket"] @property - def list_views(self) -> Callable[ - [logging_config.ListViewsRequest], - logging_config.ListViewsResponse]: + def list_views( + self, + ) -> Callable[[logging_config.ListViewsRequest], logging_config.ListViewsResponse]: r"""Return a callable for the list views method over gRPC. Lists views on a log bucket. @@ -583,18 +595,18 @@ def list_views(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_views' not in self._stubs: - self._stubs['list_views'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListViews', + if "list_views" not in self._stubs: + self._stubs["list_views"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListViews", request_serializer=logging_config.ListViewsRequest.serialize, response_deserializer=logging_config.ListViewsResponse.deserialize, ) - return self._stubs['list_views'] + return self._stubs["list_views"] @property - def get_view(self) -> Callable[ - [logging_config.GetViewRequest], - logging_config.LogView]: + def get_view( + self, + ) -> Callable[[logging_config.GetViewRequest], logging_config.LogView]: r"""Return a callable for the get view method over gRPC. 
Gets a view on a log bucket.. @@ -609,18 +621,18 @@ def get_view(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_view' not in self._stubs: - self._stubs['get_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetView', + if "get_view" not in self._stubs: + self._stubs["get_view"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetView", request_serializer=logging_config.GetViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, ) - return self._stubs['get_view'] + return self._stubs["get_view"] @property - def create_view(self) -> Callable[ - [logging_config.CreateViewRequest], - logging_config.LogView]: + def create_view( + self, + ) -> Callable[[logging_config.CreateViewRequest], logging_config.LogView]: r"""Return a callable for the create view method over gRPC. Creates a view over log entries in a log bucket. A @@ -636,18 +648,18 @@ def create_view(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_view' not in self._stubs: - self._stubs['create_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateView', + if "create_view" not in self._stubs: + self._stubs["create_view"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateView", request_serializer=logging_config.CreateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, ) - return self._stubs['create_view'] + return self._stubs["create_view"] @property - def update_view(self) -> Callable[ - [logging_config.UpdateViewRequest], - logging_config.LogView]: + def update_view( + self, + ) -> Callable[[logging_config.UpdateViewRequest], logging_config.LogView]: r"""Return a callable for the update view method over gRPC. Updates a view on a log bucket. 
This method replaces the @@ -666,18 +678,18 @@ def update_view(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_view' not in self._stubs: - self._stubs['update_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateView', + if "update_view" not in self._stubs: + self._stubs["update_view"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateView", request_serializer=logging_config.UpdateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, ) - return self._stubs['update_view'] + return self._stubs["update_view"] @property - def delete_view(self) -> Callable[ - [logging_config.DeleteViewRequest], - empty_pb2.Empty]: + def delete_view( + self, + ) -> Callable[[logging_config.DeleteViewRequest], empty_pb2.Empty]: r"""Return a callable for the delete view method over gRPC. Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is @@ -695,18 +707,18 @@ def delete_view(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_view' not in self._stubs: - self._stubs['delete_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteView', + if "delete_view" not in self._stubs: + self._stubs["delete_view"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteView", request_serializer=logging_config.DeleteViewRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_view'] + return self._stubs["delete_view"] @property - def list_sinks(self) -> Callable[ - [logging_config.ListSinksRequest], - logging_config.ListSinksResponse]: + def list_sinks( + self, + ) -> Callable[[logging_config.ListSinksRequest], logging_config.ListSinksResponse]: r"""Return a callable for the list sinks method over gRPC. 
Lists sinks. @@ -721,18 +733,18 @@ def list_sinks(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_sinks' not in self._stubs: - self._stubs['list_sinks'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListSinks', + if "list_sinks" not in self._stubs: + self._stubs["list_sinks"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListSinks", request_serializer=logging_config.ListSinksRequest.serialize, response_deserializer=logging_config.ListSinksResponse.deserialize, ) - return self._stubs['list_sinks'] + return self._stubs["list_sinks"] @property - def get_sink(self) -> Callable[ - [logging_config.GetSinkRequest], - logging_config.LogSink]: + def get_sink( + self, + ) -> Callable[[logging_config.GetSinkRequest], logging_config.LogSink]: r"""Return a callable for the get sink method over gRPC. Gets a sink. @@ -747,18 +759,18 @@ def get_sink(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_sink' not in self._stubs: - self._stubs['get_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetSink', + if "get_sink" not in self._stubs: + self._stubs["get_sink"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetSink", request_serializer=logging_config.GetSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, ) - return self._stubs['get_sink'] + return self._stubs["get_sink"] @property - def create_sink(self) -> Callable[ - [logging_config.CreateSinkRequest], - logging_config.LogSink]: + def create_sink( + self, + ) -> Callable[[logging_config.CreateSinkRequest], logging_config.LogSink]: r"""Return a callable for the create sink method over gRPC. 
Creates a sink that exports specified log entries to a @@ -777,18 +789,18 @@ def create_sink(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_sink' not in self._stubs: - self._stubs['create_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateSink', + if "create_sink" not in self._stubs: + self._stubs["create_sink"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateSink", request_serializer=logging_config.CreateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, ) - return self._stubs['create_sink'] + return self._stubs["create_sink"] @property - def update_sink(self) -> Callable[ - [logging_config.UpdateSinkRequest], - logging_config.LogSink]: + def update_sink( + self, + ) -> Callable[[logging_config.UpdateSinkRequest], logging_config.LogSink]: r"""Return a callable for the update sink method over gRPC. Updates a sink. This method replaces the following fields in the @@ -808,18 +820,18 @@ def update_sink(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'update_sink' not in self._stubs: - self._stubs['update_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateSink', + if "update_sink" not in self._stubs: + self._stubs["update_sink"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateSink", request_serializer=logging_config.UpdateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, ) - return self._stubs['update_sink'] + return self._stubs["update_sink"] @property - def delete_sink(self) -> Callable[ - [logging_config.DeleteSinkRequest], - empty_pb2.Empty]: + def delete_sink( + self, + ) -> Callable[[logging_config.DeleteSinkRequest], empty_pb2.Empty]: r"""Return a callable for the delete sink method over gRPC. Deletes a sink. If the sink has a unique ``writer_identity``, @@ -835,18 +847,18 @@ def delete_sink(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_sink' not in self._stubs: - self._stubs['delete_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteSink', + if "delete_sink" not in self._stubs: + self._stubs["delete_sink"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteSink", request_serializer=logging_config.DeleteSinkRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_sink'] + return self._stubs["delete_sink"] @property - def create_link(self) -> Callable[ - [logging_config.CreateLinkRequest], - operations_pb2.Operation]: + def create_link( + self, + ) -> Callable[[logging_config.CreateLinkRequest], operations_pb2.Operation]: r"""Return a callable for the create link method over gRPC. Asynchronously creates a linked dataset in BigQuery @@ -864,18 +876,18 @@ def create_link(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_link' not in self._stubs: - self._stubs['create_link'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateLink', + if "create_link" not in self._stubs: + self._stubs["create_link"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateLink", request_serializer=logging_config.CreateLinkRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_link'] + return self._stubs["create_link"] @property - def delete_link(self) -> Callable[ - [logging_config.DeleteLinkRequest], - operations_pb2.Operation]: + def delete_link( + self, + ) -> Callable[[logging_config.DeleteLinkRequest], operations_pb2.Operation]: r"""Return a callable for the delete link method over gRPC. Deletes a link. This will also delete the @@ -891,18 +903,18 @@ def delete_link(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_link' not in self._stubs: - self._stubs['delete_link'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteLink', + if "delete_link" not in self._stubs: + self._stubs["delete_link"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteLink", request_serializer=logging_config.DeleteLinkRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['delete_link'] + return self._stubs["delete_link"] @property - def list_links(self) -> Callable[ - [logging_config.ListLinksRequest], - logging_config.ListLinksResponse]: + def list_links( + self, + ) -> Callable[[logging_config.ListLinksRequest], logging_config.ListLinksResponse]: r"""Return a callable for the list links method over gRPC. Lists links. @@ -917,18 +929,18 @@ def list_links(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_links' not in self._stubs: - self._stubs['list_links'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListLinks', + if "list_links" not in self._stubs: + self._stubs["list_links"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListLinks", request_serializer=logging_config.ListLinksRequest.serialize, response_deserializer=logging_config.ListLinksResponse.deserialize, ) - return self._stubs['list_links'] + return self._stubs["list_links"] @property - def get_link(self) -> Callable[ - [logging_config.GetLinkRequest], - logging_config.Link]: + def get_link( + self, + ) -> Callable[[logging_config.GetLinkRequest], logging_config.Link]: r"""Return a callable for the get link method over gRPC. Gets a link. @@ -943,18 +955,20 @@ def get_link(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_link' not in self._stubs: - self._stubs['get_link'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetLink', + if "get_link" not in self._stubs: + self._stubs["get_link"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetLink", request_serializer=logging_config.GetLinkRequest.serialize, response_deserializer=logging_config.Link.deserialize, ) - return self._stubs['get_link'] + return self._stubs["get_link"] @property - def list_exclusions(self) -> Callable[ - [logging_config.ListExclusionsRequest], - logging_config.ListExclusionsResponse]: + def list_exclusions( + self, + ) -> Callable[ + [logging_config.ListExclusionsRequest], logging_config.ListExclusionsResponse + ]: r"""Return a callable for the list exclusions method over gRPC. Lists all the exclusions on the \_Default sink in a parent @@ -970,18 +984,18 @@ def list_exclusions(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_exclusions' not in self._stubs: - self._stubs['list_exclusions'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListExclusions', + if "list_exclusions" not in self._stubs: + self._stubs["list_exclusions"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListExclusions", request_serializer=logging_config.ListExclusionsRequest.serialize, response_deserializer=logging_config.ListExclusionsResponse.deserialize, ) - return self._stubs['list_exclusions'] + return self._stubs["list_exclusions"] @property - def get_exclusion(self) -> Callable[ - [logging_config.GetExclusionRequest], - logging_config.LogExclusion]: + def get_exclusion( + self, + ) -> Callable[[logging_config.GetExclusionRequest], logging_config.LogExclusion]: r"""Return a callable for the get exclusion method over gRPC. Gets the description of an exclusion in the \_Default sink. @@ -996,18 +1010,18 @@ def get_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'get_exclusion' not in self._stubs: - self._stubs['get_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetExclusion', + if "get_exclusion" not in self._stubs: + self._stubs["get_exclusion"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetExclusion", request_serializer=logging_config.GetExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, ) - return self._stubs['get_exclusion'] + return self._stubs["get_exclusion"] @property - def create_exclusion(self) -> Callable[ - [logging_config.CreateExclusionRequest], - logging_config.LogExclusion]: + def create_exclusion( + self, + ) -> Callable[[logging_config.CreateExclusionRequest], logging_config.LogExclusion]: r"""Return a callable for the create exclusion method over gRPC. Creates a new exclusion in the \_Default sink in a specified @@ -1024,18 +1038,18 @@ def create_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_exclusion' not in self._stubs: - self._stubs['create_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateExclusion', + if "create_exclusion" not in self._stubs: + self._stubs["create_exclusion"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateExclusion", request_serializer=logging_config.CreateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, ) - return self._stubs['create_exclusion'] + return self._stubs["create_exclusion"] @property - def update_exclusion(self) -> Callable[ - [logging_config.UpdateExclusionRequest], - logging_config.LogExclusion]: + def update_exclusion( + self, + ) -> Callable[[logging_config.UpdateExclusionRequest], logging_config.LogExclusion]: r"""Return a callable for the update exclusion method over gRPC. 
Changes one or more properties of an existing exclusion in the @@ -1051,18 +1065,18 @@ def update_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_exclusion' not in self._stubs: - self._stubs['update_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateExclusion', + if "update_exclusion" not in self._stubs: + self._stubs["update_exclusion"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateExclusion", request_serializer=logging_config.UpdateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, ) - return self._stubs['update_exclusion'] + return self._stubs["update_exclusion"] @property - def delete_exclusion(self) -> Callable[ - [logging_config.DeleteExclusionRequest], - empty_pb2.Empty]: + def delete_exclusion( + self, + ) -> Callable[[logging_config.DeleteExclusionRequest], empty_pb2.Empty]: r"""Return a callable for the delete exclusion method over gRPC. Deletes an exclusion in the \_Default sink. @@ -1077,18 +1091,18 @@ def delete_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'delete_exclusion' not in self._stubs: - self._stubs['delete_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteExclusion', + if "delete_exclusion" not in self._stubs: + self._stubs["delete_exclusion"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteExclusion", request_serializer=logging_config.DeleteExclusionRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_exclusion'] + return self._stubs["delete_exclusion"] @property - def get_cmek_settings(self) -> Callable[ - [logging_config.GetCmekSettingsRequest], - logging_config.CmekSettings]: + def get_cmek_settings( + self, + ) -> Callable[[logging_config.GetCmekSettingsRequest], logging_config.CmekSettings]: r"""Return a callable for the get cmek settings method over gRPC. Gets the Logging CMEK settings for the given resource. @@ -1112,18 +1126,20 @@ def get_cmek_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_cmek_settings' not in self._stubs: - self._stubs['get_cmek_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetCmekSettings', + if "get_cmek_settings" not in self._stubs: + self._stubs["get_cmek_settings"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetCmekSettings", request_serializer=logging_config.GetCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, ) - return self._stubs['get_cmek_settings'] + return self._stubs["get_cmek_settings"] @property - def update_cmek_settings(self) -> Callable[ - [logging_config.UpdateCmekSettingsRequest], - logging_config.CmekSettings]: + def update_cmek_settings( + self, + ) -> Callable[ + [logging_config.UpdateCmekSettingsRequest], logging_config.CmekSettings + ]: r"""Return a callable for the update cmek settings method over gRPC. 
Updates the Log Router CMEK settings for the given resource. @@ -1152,18 +1168,18 @@ def update_cmek_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_cmek_settings' not in self._stubs: - self._stubs['update_cmek_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateCmekSettings', + if "update_cmek_settings" not in self._stubs: + self._stubs["update_cmek_settings"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateCmekSettings", request_serializer=logging_config.UpdateCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, ) - return self._stubs['update_cmek_settings'] + return self._stubs["update_cmek_settings"] @property - def get_settings(self) -> Callable[ - [logging_config.GetSettingsRequest], - logging_config.Settings]: + def get_settings( + self, + ) -> Callable[[logging_config.GetSettingsRequest], logging_config.Settings]: r"""Return a callable for the get settings method over gRPC. Gets the Log Router settings for the given resource. @@ -1188,18 +1204,18 @@ def get_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'get_settings' not in self._stubs: - self._stubs['get_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetSettings', + if "get_settings" not in self._stubs: + self._stubs["get_settings"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetSettings", request_serializer=logging_config.GetSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, ) - return self._stubs['get_settings'] + return self._stubs["get_settings"] @property - def update_settings(self) -> Callable[ - [logging_config.UpdateSettingsRequest], - logging_config.Settings]: + def update_settings( + self, + ) -> Callable[[logging_config.UpdateSettingsRequest], logging_config.Settings]: r"""Return a callable for the update settings method over gRPC. Updates the Log Router settings for the given resource. @@ -1231,18 +1247,18 @@ def update_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_settings' not in self._stubs: - self._stubs['update_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateSettings', + if "update_settings" not in self._stubs: + self._stubs["update_settings"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateSettings", request_serializer=logging_config.UpdateSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, ) - return self._stubs['update_settings'] + return self._stubs["update_settings"] @property - def copy_log_entries(self) -> Callable[ - [logging_config.CopyLogEntriesRequest], - operations_pb2.Operation]: + def copy_log_entries( + self, + ) -> Callable[[logging_config.CopyLogEntriesRequest], operations_pb2.Operation]: r"""Return a callable for the copy log entries method over gRPC. 
Copies a set of log entries from a log bucket to a @@ -1258,13 +1274,13 @@ def copy_log_entries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'copy_log_entries' not in self._stubs: - self._stubs['copy_log_entries'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CopyLogEntries', + if "copy_log_entries" not in self._stubs: + self._stubs["copy_log_entries"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CopyLogEntries", request_serializer=logging_config.CopyLogEntriesRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['copy_log_entries'] + return self._stubs["copy_log_entries"] def close(self): self._logged_channel.close() @@ -1273,8 +1289,7 @@ def close(self): def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -1291,8 +1306,7 @@ def cancel_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -1308,9 +1322,10 @@ def get_operation( @property def list_operations( self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. 
- """ + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -1328,6 +1343,4 @@ def kind(self) -> str: return "grpc" -__all__ = ( - 'ConfigServiceV2GrpcTransport', -) +__all__ = ("ConfigServiceV2GrpcTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index f230fa2a74a9..7c8d93916b96 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -15,33 +15,31 @@ # import inspect import json -import pickle import logging as std_logging +import pickle import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.api_core import retry_async as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: 
ignore +from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore from grpc.experimental import aio # type: ignore -from google.cloud.logging_v2.types import logging_config -from google.longrunning import operations_pb2 # type: ignore -import google.protobuf.empty_pb2 as empty_pb2 # type: ignore -from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO +from .base import DEFAULT_CLIENT_INFO, ConfigServiceV2Transport from .grpc import ConfigServiceV2GrpcTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -49,9 +47,13 @@ _LOGGER = std_logging.getLogger(__name__) -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) if logging_enabled: # pragma: NO COVER request_metadata = client_call_details.metadata if isinstance(request, proto.Message): @@ -72,7 +74,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.logging.v2.ConfigServiceV2", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -83,7 +85,11 @@ async def intercept_unary_unary(self, continuation, client_call_details, request if logging_enabled: # pragma: NO COVER response_metadata = await 
response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) result = await response if isinstance(result, proto.Message): response_payload = type(result).to_json(result) @@ -98,7 +104,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Received response to rpc {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.logging.v2.ConfigServiceV2", "rpcName": str(client_call_details.method), "response": grpc_response, @@ -125,13 +131,15 @@ class ConfigServiceV2GrpcAsyncIOTransport(ConfigServiceV2Transport): _stubs: Dict[str, Callable] = {} @classmethod - def create_channel(cls, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: + def create_channel( + cls, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: host (Optional[str]): The host for the channel to use. 
@@ -162,24 +170,26 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. 
Args: @@ -309,7 +319,9 @@ def __init__(self, *, self._interceptor = _LoggingClientAIOInterceptor() self._grpc_channel._unary_unary_interceptors.append(self._interceptor) self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @@ -340,9 +352,12 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: return self._operations_client @property - def list_buckets(self) -> Callable[ - [logging_config.ListBucketsRequest], - Awaitable[logging_config.ListBucketsResponse]]: + def list_buckets( + self, + ) -> Callable[ + [logging_config.ListBucketsRequest], + Awaitable[logging_config.ListBucketsResponse], + ]: r"""Return a callable for the list buckets method over gRPC. Lists log buckets. @@ -357,18 +372,20 @@ def list_buckets(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_buckets' not in self._stubs: - self._stubs['list_buckets'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListBuckets', + if "list_buckets" not in self._stubs: + self._stubs["list_buckets"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListBuckets", request_serializer=logging_config.ListBucketsRequest.serialize, response_deserializer=logging_config.ListBucketsResponse.deserialize, ) - return self._stubs['list_buckets'] + return self._stubs["list_buckets"] @property - def get_bucket(self) -> Callable[ - [logging_config.GetBucketRequest], - Awaitable[logging_config.LogBucket]]: + def get_bucket( + self, + ) -> Callable[ + [logging_config.GetBucketRequest], Awaitable[logging_config.LogBucket] + ]: r"""Return a callable for the get bucket method over gRPC. Gets a log bucket. @@ -383,18 +400,20 @@ def get_bucket(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_bucket' not in self._stubs: - self._stubs['get_bucket'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetBucket', + if "get_bucket" not in self._stubs: + self._stubs["get_bucket"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetBucket", request_serializer=logging_config.GetBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, ) - return self._stubs['get_bucket'] + return self._stubs["get_bucket"] @property - def create_bucket_async(self) -> Callable[ - [logging_config.CreateBucketRequest], - Awaitable[operations_pb2.Operation]]: + def create_bucket_async( + self, + ) -> Callable[ + [logging_config.CreateBucketRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the create bucket async method over gRPC. Creates a log bucket asynchronously that can be used @@ -412,18 +431,20 @@ def create_bucket_async(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_bucket_async' not in self._stubs: - self._stubs['create_bucket_async'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateBucketAsync', + if "create_bucket_async" not in self._stubs: + self._stubs["create_bucket_async"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateBucketAsync", request_serializer=logging_config.CreateBucketRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_bucket_async'] + return self._stubs["create_bucket_async"] @property - def update_bucket_async(self) -> Callable[ - [logging_config.UpdateBucketRequest], - Awaitable[operations_pb2.Operation]]: + def update_bucket_async( + self, + ) -> Callable[ + [logging_config.UpdateBucketRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the update bucket async method over gRPC. Updates a log bucket asynchronously. @@ -444,18 +465,20 @@ def update_bucket_async(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'update_bucket_async' not in self._stubs: - self._stubs['update_bucket_async'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateBucketAsync', + if "update_bucket_async" not in self._stubs: + self._stubs["update_bucket_async"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateBucketAsync", request_serializer=logging_config.UpdateBucketRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['update_bucket_async'] + return self._stubs["update_bucket_async"] @property - def create_bucket(self) -> Callable[ - [logging_config.CreateBucketRequest], - Awaitable[logging_config.LogBucket]]: + def create_bucket( + self, + ) -> Callable[ + [logging_config.CreateBucketRequest], Awaitable[logging_config.LogBucket] + ]: r"""Return a callable for the create bucket method over gRPC. Creates a log bucket that can be used to store log @@ -472,18 +495,20 @@ def create_bucket(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_bucket' not in self._stubs: - self._stubs['create_bucket'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateBucket', + if "create_bucket" not in self._stubs: + self._stubs["create_bucket"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateBucket", request_serializer=logging_config.CreateBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, ) - return self._stubs['create_bucket'] + return self._stubs["create_bucket"] @property - def update_bucket(self) -> Callable[ - [logging_config.UpdateBucketRequest], - Awaitable[logging_config.LogBucket]]: + def update_bucket( + self, + ) -> Callable[ + [logging_config.UpdateBucketRequest], Awaitable[logging_config.LogBucket] + ]: r"""Return a callable for the update bucket method over gRPC. Updates a log bucket. 
@@ -504,18 +529,18 @@ def update_bucket(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_bucket' not in self._stubs: - self._stubs['update_bucket'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateBucket', + if "update_bucket" not in self._stubs: + self._stubs["update_bucket"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateBucket", request_serializer=logging_config.UpdateBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, ) - return self._stubs['update_bucket'] + return self._stubs["update_bucket"] @property - def delete_bucket(self) -> Callable[ - [logging_config.DeleteBucketRequest], - Awaitable[empty_pb2.Empty]]: + def delete_bucket( + self, + ) -> Callable[[logging_config.DeleteBucketRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete bucket method over gRPC. Deletes a log bucket. @@ -535,18 +560,18 @@ def delete_bucket(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'delete_bucket' not in self._stubs: - self._stubs['delete_bucket'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteBucket', + if "delete_bucket" not in self._stubs: + self._stubs["delete_bucket"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteBucket", request_serializer=logging_config.DeleteBucketRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_bucket'] + return self._stubs["delete_bucket"] @property - def undelete_bucket(self) -> Callable[ - [logging_config.UndeleteBucketRequest], - Awaitable[empty_pb2.Empty]]: + def undelete_bucket( + self, + ) -> Callable[[logging_config.UndeleteBucketRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the undelete bucket method over gRPC. Undeletes a log bucket. A bucket that has been @@ -563,18 +588,20 @@ def undelete_bucket(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'undelete_bucket' not in self._stubs: - self._stubs['undelete_bucket'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UndeleteBucket', + if "undelete_bucket" not in self._stubs: + self._stubs["undelete_bucket"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UndeleteBucket", request_serializer=logging_config.UndeleteBucketRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['undelete_bucket'] + return self._stubs["undelete_bucket"] @property - def list_views(self) -> Callable[ - [logging_config.ListViewsRequest], - Awaitable[logging_config.ListViewsResponse]]: + def list_views( + self, + ) -> Callable[ + [logging_config.ListViewsRequest], Awaitable[logging_config.ListViewsResponse] + ]: r"""Return a callable for the list views method over gRPC. Lists views on a log bucket. @@ -589,18 +616,18 @@ def list_views(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_views' not in self._stubs: - self._stubs['list_views'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListViews', + if "list_views" not in self._stubs: + self._stubs["list_views"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListViews", request_serializer=logging_config.ListViewsRequest.serialize, response_deserializer=logging_config.ListViewsResponse.deserialize, ) - return self._stubs['list_views'] + return self._stubs["list_views"] @property - def get_view(self) -> Callable[ - [logging_config.GetViewRequest], - Awaitable[logging_config.LogView]]: + def get_view( + self, + ) -> Callable[[logging_config.GetViewRequest], Awaitable[logging_config.LogView]]: r"""Return a callable for the get view method over gRPC. Gets a view on a log bucket.. @@ -615,18 +642,20 @@ def get_view(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_view' not in self._stubs: - self._stubs['get_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetView', + if "get_view" not in self._stubs: + self._stubs["get_view"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetView", request_serializer=logging_config.GetViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, ) - return self._stubs['get_view'] + return self._stubs["get_view"] @property - def create_view(self) -> Callable[ - [logging_config.CreateViewRequest], - Awaitable[logging_config.LogView]]: + def create_view( + self, + ) -> Callable[ + [logging_config.CreateViewRequest], Awaitable[logging_config.LogView] + ]: r"""Return a callable for the create view method over gRPC. Creates a view over log entries in a log bucket. A @@ -642,18 +671,20 @@ def create_view(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_view' not in self._stubs: - self._stubs['create_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateView', + if "create_view" not in self._stubs: + self._stubs["create_view"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateView", request_serializer=logging_config.CreateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, ) - return self._stubs['create_view'] + return self._stubs["create_view"] @property - def update_view(self) -> Callable[ - [logging_config.UpdateViewRequest], - Awaitable[logging_config.LogView]]: + def update_view( + self, + ) -> Callable[ + [logging_config.UpdateViewRequest], Awaitable[logging_config.LogView] + ]: r"""Return a callable for the update view method over gRPC. Updates a view on a log bucket. This method replaces the @@ -672,18 +703,18 @@ def update_view(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_view' not in self._stubs: - self._stubs['update_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateView', + if "update_view" not in self._stubs: + self._stubs["update_view"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateView", request_serializer=logging_config.UpdateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, ) - return self._stubs['update_view'] + return self._stubs["update_view"] @property - def delete_view(self) -> Callable[ - [logging_config.DeleteViewRequest], - Awaitable[empty_pb2.Empty]]: + def delete_view( + self, + ) -> Callable[[logging_config.DeleteViewRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete view method over gRPC. Deletes a view on a log bucket. 
If an ``UNAVAILABLE`` error is @@ -701,18 +732,20 @@ def delete_view(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_view' not in self._stubs: - self._stubs['delete_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteView', + if "delete_view" not in self._stubs: + self._stubs["delete_view"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteView", request_serializer=logging_config.DeleteViewRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_view'] + return self._stubs["delete_view"] @property - def list_sinks(self) -> Callable[ - [logging_config.ListSinksRequest], - Awaitable[logging_config.ListSinksResponse]]: + def list_sinks( + self, + ) -> Callable[ + [logging_config.ListSinksRequest], Awaitable[logging_config.ListSinksResponse] + ]: r"""Return a callable for the list sinks method over gRPC. Lists sinks. @@ -727,18 +760,18 @@ def list_sinks(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_sinks' not in self._stubs: - self._stubs['list_sinks'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListSinks', + if "list_sinks" not in self._stubs: + self._stubs["list_sinks"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListSinks", request_serializer=logging_config.ListSinksRequest.serialize, response_deserializer=logging_config.ListSinksResponse.deserialize, ) - return self._stubs['list_sinks'] + return self._stubs["list_sinks"] @property - def get_sink(self) -> Callable[ - [logging_config.GetSinkRequest], - Awaitable[logging_config.LogSink]]: + def get_sink( + self, + ) -> Callable[[logging_config.GetSinkRequest], Awaitable[logging_config.LogSink]]: r"""Return a callable for the get sink method over gRPC. 
Gets a sink. @@ -753,18 +786,20 @@ def get_sink(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_sink' not in self._stubs: - self._stubs['get_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetSink', + if "get_sink" not in self._stubs: + self._stubs["get_sink"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetSink", request_serializer=logging_config.GetSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, ) - return self._stubs['get_sink'] + return self._stubs["get_sink"] @property - def create_sink(self) -> Callable[ - [logging_config.CreateSinkRequest], - Awaitable[logging_config.LogSink]]: + def create_sink( + self, + ) -> Callable[ + [logging_config.CreateSinkRequest], Awaitable[logging_config.LogSink] + ]: r"""Return a callable for the create sink method over gRPC. Creates a sink that exports specified log entries to a @@ -783,18 +818,20 @@ def create_sink(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'create_sink' not in self._stubs: - self._stubs['create_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateSink', + if "create_sink" not in self._stubs: + self._stubs["create_sink"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateSink", request_serializer=logging_config.CreateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, ) - return self._stubs['create_sink'] + return self._stubs["create_sink"] @property - def update_sink(self) -> Callable[ - [logging_config.UpdateSinkRequest], - Awaitable[logging_config.LogSink]]: + def update_sink( + self, + ) -> Callable[ + [logging_config.UpdateSinkRequest], Awaitable[logging_config.LogSink] + ]: r"""Return a callable for the update sink method over gRPC. Updates a sink. This method replaces the following fields in the @@ -814,18 +851,18 @@ def update_sink(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_sink' not in self._stubs: - self._stubs['update_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateSink', + if "update_sink" not in self._stubs: + self._stubs["update_sink"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateSink", request_serializer=logging_config.UpdateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, ) - return self._stubs['update_sink'] + return self._stubs["update_sink"] @property - def delete_sink(self) -> Callable[ - [logging_config.DeleteSinkRequest], - Awaitable[empty_pb2.Empty]]: + def delete_sink( + self, + ) -> Callable[[logging_config.DeleteSinkRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete sink method over gRPC. Deletes a sink. If the sink has a unique ``writer_identity``, @@ -841,18 +878,20 @@ def delete_sink(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_sink' not in self._stubs: - self._stubs['delete_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteSink', + if "delete_sink" not in self._stubs: + self._stubs["delete_sink"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteSink", request_serializer=logging_config.DeleteSinkRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_sink'] + return self._stubs["delete_sink"] @property - def create_link(self) -> Callable[ - [logging_config.CreateLinkRequest], - Awaitable[operations_pb2.Operation]]: + def create_link( + self, + ) -> Callable[ + [logging_config.CreateLinkRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the create link method over gRPC. Asynchronously creates a linked dataset in BigQuery @@ -870,18 +909,20 @@ def create_link(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_link' not in self._stubs: - self._stubs['create_link'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateLink', + if "create_link" not in self._stubs: + self._stubs["create_link"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateLink", request_serializer=logging_config.CreateLinkRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_link'] + return self._stubs["create_link"] @property - def delete_link(self) -> Callable[ - [logging_config.DeleteLinkRequest], - Awaitable[operations_pb2.Operation]]: + def delete_link( + self, + ) -> Callable[ + [logging_config.DeleteLinkRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the delete link method over gRPC. Deletes a link. 
This will also delete the @@ -897,18 +938,20 @@ def delete_link(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_link' not in self._stubs: - self._stubs['delete_link'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteLink', + if "delete_link" not in self._stubs: + self._stubs["delete_link"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteLink", request_serializer=logging_config.DeleteLinkRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['delete_link'] + return self._stubs["delete_link"] @property - def list_links(self) -> Callable[ - [logging_config.ListLinksRequest], - Awaitable[logging_config.ListLinksResponse]]: + def list_links( + self, + ) -> Callable[ + [logging_config.ListLinksRequest], Awaitable[logging_config.ListLinksResponse] + ]: r"""Return a callable for the list links method over gRPC. Lists links. @@ -923,18 +966,18 @@ def list_links(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_links' not in self._stubs: - self._stubs['list_links'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListLinks', + if "list_links" not in self._stubs: + self._stubs["list_links"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListLinks", request_serializer=logging_config.ListLinksRequest.serialize, response_deserializer=logging_config.ListLinksResponse.deserialize, ) - return self._stubs['list_links'] + return self._stubs["list_links"] @property - def get_link(self) -> Callable[ - [logging_config.GetLinkRequest], - Awaitable[logging_config.Link]]: + def get_link( + self, + ) -> Callable[[logging_config.GetLinkRequest], Awaitable[logging_config.Link]]: r"""Return a callable for the get link method over gRPC. 
Gets a link. @@ -949,18 +992,21 @@ def get_link(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_link' not in self._stubs: - self._stubs['get_link'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetLink', + if "get_link" not in self._stubs: + self._stubs["get_link"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetLink", request_serializer=logging_config.GetLinkRequest.serialize, response_deserializer=logging_config.Link.deserialize, ) - return self._stubs['get_link'] + return self._stubs["get_link"] @property - def list_exclusions(self) -> Callable[ - [logging_config.ListExclusionsRequest], - Awaitable[logging_config.ListExclusionsResponse]]: + def list_exclusions( + self, + ) -> Callable[ + [logging_config.ListExclusionsRequest], + Awaitable[logging_config.ListExclusionsResponse], + ]: r"""Return a callable for the list exclusions method over gRPC. Lists all the exclusions on the \_Default sink in a parent @@ -976,18 +1022,20 @@ def list_exclusions(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_exclusions' not in self._stubs: - self._stubs['list_exclusions'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListExclusions', + if "list_exclusions" not in self._stubs: + self._stubs["list_exclusions"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListExclusions", request_serializer=logging_config.ListExclusionsRequest.serialize, response_deserializer=logging_config.ListExclusionsResponse.deserialize, ) - return self._stubs['list_exclusions'] + return self._stubs["list_exclusions"] @property - def get_exclusion(self) -> Callable[ - [logging_config.GetExclusionRequest], - Awaitable[logging_config.LogExclusion]]: + def get_exclusion( + self, + ) -> Callable[ + [logging_config.GetExclusionRequest], Awaitable[logging_config.LogExclusion] + ]: r"""Return a callable for the get exclusion method over gRPC. Gets the description of an exclusion in the \_Default sink. @@ -1002,18 +1050,20 @@ def get_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_exclusion' not in self._stubs: - self._stubs['get_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetExclusion', + if "get_exclusion" not in self._stubs: + self._stubs["get_exclusion"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetExclusion", request_serializer=logging_config.GetExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, ) - return self._stubs['get_exclusion'] + return self._stubs["get_exclusion"] @property - def create_exclusion(self) -> Callable[ - [logging_config.CreateExclusionRequest], - Awaitable[logging_config.LogExclusion]]: + def create_exclusion( + self, + ) -> Callable[ + [logging_config.CreateExclusionRequest], Awaitable[logging_config.LogExclusion] + ]: r"""Return a callable for the create exclusion method over gRPC. 
Creates a new exclusion in the \_Default sink in a specified @@ -1030,18 +1080,20 @@ def create_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_exclusion' not in self._stubs: - self._stubs['create_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateExclusion', + if "create_exclusion" not in self._stubs: + self._stubs["create_exclusion"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateExclusion", request_serializer=logging_config.CreateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, ) - return self._stubs['create_exclusion'] + return self._stubs["create_exclusion"] @property - def update_exclusion(self) -> Callable[ - [logging_config.UpdateExclusionRequest], - Awaitable[logging_config.LogExclusion]]: + def update_exclusion( + self, + ) -> Callable[ + [logging_config.UpdateExclusionRequest], Awaitable[logging_config.LogExclusion] + ]: r"""Return a callable for the update exclusion method over gRPC. Changes one or more properties of an existing exclusion in the @@ -1057,18 +1109,18 @@ def update_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'update_exclusion' not in self._stubs: - self._stubs['update_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateExclusion', + if "update_exclusion" not in self._stubs: + self._stubs["update_exclusion"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateExclusion", request_serializer=logging_config.UpdateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, ) - return self._stubs['update_exclusion'] + return self._stubs["update_exclusion"] @property - def delete_exclusion(self) -> Callable[ - [logging_config.DeleteExclusionRequest], - Awaitable[empty_pb2.Empty]]: + def delete_exclusion( + self, + ) -> Callable[[logging_config.DeleteExclusionRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete exclusion method over gRPC. Deletes an exclusion in the \_Default sink. @@ -1083,18 +1135,20 @@ def delete_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_exclusion' not in self._stubs: - self._stubs['delete_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteExclusion', + if "delete_exclusion" not in self._stubs: + self._stubs["delete_exclusion"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteExclusion", request_serializer=logging_config.DeleteExclusionRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_exclusion'] + return self._stubs["delete_exclusion"] @property - def get_cmek_settings(self) -> Callable[ - [logging_config.GetCmekSettingsRequest], - Awaitable[logging_config.CmekSettings]]: + def get_cmek_settings( + self, + ) -> Callable[ + [logging_config.GetCmekSettingsRequest], Awaitable[logging_config.CmekSettings] + ]: r"""Return a callable for the get cmek settings method over gRPC. 
Gets the Logging CMEK settings for the given resource. @@ -1118,18 +1172,21 @@ def get_cmek_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_cmek_settings' not in self._stubs: - self._stubs['get_cmek_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetCmekSettings', + if "get_cmek_settings" not in self._stubs: + self._stubs["get_cmek_settings"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetCmekSettings", request_serializer=logging_config.GetCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, ) - return self._stubs['get_cmek_settings'] + return self._stubs["get_cmek_settings"] @property - def update_cmek_settings(self) -> Callable[ - [logging_config.UpdateCmekSettingsRequest], - Awaitable[logging_config.CmekSettings]]: + def update_cmek_settings( + self, + ) -> Callable[ + [logging_config.UpdateCmekSettingsRequest], + Awaitable[logging_config.CmekSettings], + ]: r"""Return a callable for the update cmek settings method over gRPC. Updates the Log Router CMEK settings for the given resource. @@ -1158,18 +1215,20 @@ def update_cmek_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'update_cmek_settings' not in self._stubs: - self._stubs['update_cmek_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateCmekSettings', + if "update_cmek_settings" not in self._stubs: + self._stubs["update_cmek_settings"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateCmekSettings", request_serializer=logging_config.UpdateCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, ) - return self._stubs['update_cmek_settings'] + return self._stubs["update_cmek_settings"] @property - def get_settings(self) -> Callable[ - [logging_config.GetSettingsRequest], - Awaitable[logging_config.Settings]]: + def get_settings( + self, + ) -> Callable[ + [logging_config.GetSettingsRequest], Awaitable[logging_config.Settings] + ]: r"""Return a callable for the get settings method over gRPC. Gets the Log Router settings for the given resource. @@ -1194,18 +1253,20 @@ def get_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_settings' not in self._stubs: - self._stubs['get_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetSettings', + if "get_settings" not in self._stubs: + self._stubs["get_settings"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetSettings", request_serializer=logging_config.GetSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, ) - return self._stubs['get_settings'] + return self._stubs["get_settings"] @property - def update_settings(self) -> Callable[ - [logging_config.UpdateSettingsRequest], - Awaitable[logging_config.Settings]]: + def update_settings( + self, + ) -> Callable[ + [logging_config.UpdateSettingsRequest], Awaitable[logging_config.Settings] + ]: r"""Return a callable for the update settings method over gRPC. 
Updates the Log Router settings for the given resource. @@ -1237,18 +1298,20 @@ def update_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_settings' not in self._stubs: - self._stubs['update_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateSettings', + if "update_settings" not in self._stubs: + self._stubs["update_settings"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateSettings", request_serializer=logging_config.UpdateSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, ) - return self._stubs['update_settings'] + return self._stubs["update_settings"] @property - def copy_log_entries(self) -> Callable[ - [logging_config.CopyLogEntriesRequest], - Awaitable[operations_pb2.Operation]]: + def copy_log_entries( + self, + ) -> Callable[ + [logging_config.CopyLogEntriesRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the copy log entries method over gRPC. Copies a set of log entries from a log bucket to a @@ -1264,16 +1327,16 @@ def copy_log_entries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'copy_log_entries' not in self._stubs: - self._stubs['copy_log_entries'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CopyLogEntries', + if "copy_log_entries" not in self._stubs: + self._stubs["copy_log_entries"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CopyLogEntries", request_serializer=logging_config.CopyLogEntriesRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['copy_log_entries'] + return self._stubs["copy_log_entries"] def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { self.list_buckets: self._wrap_method( self.list_buckets, @@ -1545,8 +1608,7 @@ def kind(self) -> str: def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -1563,8 +1625,7 @@ def cancel_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -1580,9 +1641,10 @@ def get_operation( @property def list_operations( self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. 
- """ + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -1596,6 +1658,4 @@ def list_operations( return self._stubs["list_operations"] -__all__ = ( - 'ConfigServiceV2GrpcAsyncIOTransport', -) +__all__ = ("ConfigServiceV2GrpcAsyncIOTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/__init__.py index d9820f09067b..4bf59c378463 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/__init__.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .client import LoggingServiceV2Client from .async_client import LoggingServiceV2AsyncClient +from .client import LoggingServiceV2Client __all__ = ( - 'LoggingServiceV2Client', - 'LoggingServiceV2AsyncClient', + "LoggingServiceV2Client", + "LoggingServiceV2AsyncClient", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 075976b453a5..411196ea9ebb 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -14,43 +14,57 @@ # limitations under the License. # import logging as std_logging -from collections import OrderedDict import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, AsyncIterable, Awaitable, AsyncIterator, Sequence, Tuple, Type, Union - -from google.cloud.logging_v2 import gapic_version as package_version +from collections import OrderedDict +from typing import ( + AsyncIterable, + AsyncIterator, + Awaitable, + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) -from google.api_core.client_options import ClientOptions +import google.protobuf from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from 
google.cloud.logging_v2 import gapic_version as package_version +from google.oauth2 import service_account # type: ignore try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore -from google.cloud.logging_v2.services.logging_service_v2 import pagers -from google.cloud.logging_v2.types import log_entry -from google.cloud.logging_v2.types import logging -from google.longrunning import operations_pb2 # type: ignore import google.api.monitored_resource_pb2 as monitored_resource_pb2 # type: ignore -from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport +from google.cloud.logging_v2.services.logging_service_v2 import pagers +from google.cloud.logging_v2.types import log_entry, logging +from google.longrunning import operations_pb2 # type: ignore + from .client import LoggingServiceV2Client +from .transports.base import DEFAULT_CLIENT_INFO, LoggingServiceV2Transport +from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) + class LoggingServiceV2AsyncClient: """Service for ingesting and querying logs.""" @@ -65,16 +79,30 @@ class LoggingServiceV2AsyncClient: log_path = staticmethod(LoggingServiceV2Client.log_path) parse_log_path = staticmethod(LoggingServiceV2Client.parse_log_path) - common_billing_account_path = staticmethod(LoggingServiceV2Client.common_billing_account_path) - parse_common_billing_account_path = staticmethod(LoggingServiceV2Client.parse_common_billing_account_path) + common_billing_account_path = staticmethod( + LoggingServiceV2Client.common_billing_account_path + ) + 
parse_common_billing_account_path = staticmethod( + LoggingServiceV2Client.parse_common_billing_account_path + ) common_folder_path = staticmethod(LoggingServiceV2Client.common_folder_path) - parse_common_folder_path = staticmethod(LoggingServiceV2Client.parse_common_folder_path) - common_organization_path = staticmethod(LoggingServiceV2Client.common_organization_path) - parse_common_organization_path = staticmethod(LoggingServiceV2Client.parse_common_organization_path) + parse_common_folder_path = staticmethod( + LoggingServiceV2Client.parse_common_folder_path + ) + common_organization_path = staticmethod( + LoggingServiceV2Client.common_organization_path + ) + parse_common_organization_path = staticmethod( + LoggingServiceV2Client.parse_common_organization_path + ) common_project_path = staticmethod(LoggingServiceV2Client.common_project_path) - parse_common_project_path = staticmethod(LoggingServiceV2Client.parse_common_project_path) + parse_common_project_path = staticmethod( + LoggingServiceV2Client.parse_common_project_path + ) common_location_path = staticmethod(LoggingServiceV2Client.common_location_path) - parse_common_location_path = staticmethod(LoggingServiceV2Client.parse_common_location_path) + parse_common_location_path = staticmethod( + LoggingServiceV2Client.parse_common_location_path + ) @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): @@ -116,7 +144,9 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): """Return the API endpoint and client cert source for mutual TLS. 
The client cert source is determined in the following order: @@ -179,12 +209,18 @@ def universe_domain(self) -> str: get_transport_class = LoggingServiceV2Client.get_transport_class - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, LoggingServiceV2Transport, Callable[..., LoggingServiceV2Transport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, LoggingServiceV2Transport, Callable[..., LoggingServiceV2Transport] + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the logging service v2 async client. Args: @@ -239,31 +275,39 @@ def __init__(self, *, transport=transport, client_options=client_options, client_info=client_info, - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER _LOGGER.debug( "Created client `google.logging_v2.LoggingServiceV2AsyncClient`.", - extra = { + extra={ "serviceName": "google.logging.v2.LoggingServiceV2", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if 
hasattr(self._client._transport, "_credentials") + else { "serviceName": "google.logging.v2.LoggingServiceV2", "credentialsType": None, - } + }, ) - async def delete_log(self, - request: Optional[Union[logging.DeleteLogRequest, dict]] = None, - *, - log_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def delete_log( + self, + request: Optional[Union[logging.DeleteLogRequest, dict]] = None, + *, + log_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes all the log entries in a log for the \_Default Log Bucket. The log reappears if it receives new entries. Log entries written shortly before the delete operation might not be @@ -326,10 +370,14 @@ async def sample_delete_log(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [log_name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -343,14 +391,14 @@ async def sample_delete_log(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.delete_log] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_log + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("log_name", request.log_name), - )), + gapic_v1.routing_header.to_grpc_metadata((("log_name", request.log_name),)), ) # Validate the universe domain. @@ -364,17 +412,18 @@ async def sample_delete_log(): metadata=metadata, ) - async def write_log_entries(self, - request: Optional[Union[logging.WriteLogEntriesRequest, dict]] = None, - *, - log_name: Optional[str] = None, - resource: Optional[monitored_resource_pb2.MonitoredResource] = None, - labels: Optional[MutableMapping[str, str]] = None, - entries: Optional[MutableSequence[log_entry.LogEntry]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging.WriteLogEntriesResponse: + async def write_log_entries( + self, + request: Optional[Union[logging.WriteLogEntriesRequest, dict]] = None, + *, + log_name: Optional[str] = None, + resource: Optional[monitored_resource_pb2.MonitoredResource] = None, + labels: Optional[MutableMapping[str, str]] = None, + entries: Optional[MutableSequence[log_entry.LogEntry]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging.WriteLogEntriesResponse: r"""Writes log entries to Logging. This API method is the only way to send log entries to Logging. This method is used, directly or indirectly, by the Logging agent @@ -517,10 +566,14 @@ async def sample_write_log_entries(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [log_name, resource, labels, entries] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -541,7 +594,9 @@ async def sample_write_log_entries(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.write_log_entries] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.write_log_entries + ] # Validate the universe domain. self._client._validate_universe_domain() @@ -557,16 +612,17 @@ async def sample_write_log_entries(): # Done; return the response. 
return response - async def list_log_entries(self, - request: Optional[Union[logging.ListLogEntriesRequest, dict]] = None, - *, - resource_names: Optional[MutableSequence[str]] = None, - filter: Optional[str] = None, - order_by: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLogEntriesAsyncPager: + async def list_log_entries( + self, + request: Optional[Union[logging.ListLogEntriesRequest, dict]] = None, + *, + resource_names: Optional[MutableSequence[str]] = None, + filter: Optional[str] = None, + order_by: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLogEntriesAsyncPager: r"""Lists log entries. Use this method to retrieve log entries that originated from a project/folder/organization/billing account. For ways to export log entries, see `Exporting @@ -669,10 +725,14 @@ async def sample_list_log_entries(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [resource_names, filter, order_by] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -690,7 +750,9 @@ async def sample_list_log_entries(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_log_entries] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_log_entries + ] # Validate the universe domain. self._client._validate_universe_domain() @@ -717,13 +779,16 @@ async def sample_list_log_entries(): # Done; return the response. return response - async def list_monitored_resource_descriptors(self, - request: Optional[Union[logging.ListMonitoredResourceDescriptorsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListMonitoredResourceDescriptorsAsyncPager: + async def list_monitored_resource_descriptors( + self, + request: Optional[ + Union[logging.ListMonitoredResourceDescriptorsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListMonitoredResourceDescriptorsAsyncPager: r"""Lists the descriptors for monitored resource types used by Logging. @@ -782,7 +847,9 @@ async def sample_list_monitored_resource_descriptors(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_monitored_resource_descriptors] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_monitored_resource_descriptors + ] # Validate the universe domain. self._client._validate_universe_domain() @@ -809,14 +876,15 @@ async def sample_list_monitored_resource_descriptors(): # Done; return the response. 
return response - async def list_logs(self, - request: Optional[Union[logging.ListLogsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLogsAsyncPager: + async def list_logs( + self, + request: Optional[Union[logging.ListLogsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLogsAsyncPager: r"""Lists the logs in projects, organizations, folders, or billing accounts. Only logs that have entries are listed. @@ -883,10 +951,14 @@ async def sample_list_logs(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -900,14 +972,14 @@ async def sample_list_logs(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.list_logs] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_logs + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -935,13 +1007,14 @@ async def sample_list_logs(): # Done; return the response. return response - def tail_log_entries(self, - requests: Optional[AsyncIterator[logging.TailLogEntriesRequest]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> Awaitable[AsyncIterable[logging.TailLogEntriesResponse]]: + def tail_log_entries( + self, + requests: Optional[AsyncIterator[logging.TailLogEntriesRequest]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> Awaitable[AsyncIterable[logging.TailLogEntriesResponse]]: r"""Streaming read of log entries as they are ingested. Until the stream is terminated, it will continue reading logs. @@ -1001,7 +1074,9 @@ def request_generator(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.tail_log_entries] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.tail_log_entries + ] # Validate the universe domain. self._client._validate_universe_domain() @@ -1059,8 +1134,7 @@ async def list_operations( # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -1068,7 +1142,11 @@ async def list_operations( # Send the request. response = await rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1115,8 +1193,7 @@ async def get_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -1124,7 +1201,11 @@ async def get_operation( # Send the request. response = await rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1174,15 +1255,19 @@ async def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. 
- await rpc(request_pb, retry=retry, timeout=timeout, metadata=metadata,) + await rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) async def __aenter__(self) -> "LoggingServiceV2AsyncClient": return self @@ -1190,12 +1275,13 @@ async def __aenter__(self) -> "LoggingServiceV2AsyncClient": async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "LoggingServiceV2AsyncClient", -) +__all__ = ("LoggingServiceV2AsyncClient",) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py index ea794ebe3670..1343d497918a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -13,27 +13,40 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from collections import OrderedDict -from http import HTTPStatus import json import logging as std_logging import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union, cast import warnings +from collections import OrderedDict +from http import HTTPStatus +from typing import ( + Callable, + Dict, + Iterable, + Iterator, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) -from google.cloud.logging_v2 import gapic_version as package_version - +import google.protobuf from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.logging_v2 import gapic_version as package_version +from google.oauth2 import service_account # type: ignore try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -42,18 +55,19 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) -from google.cloud.logging_v2.services.logging_service_v2 import pagers -from 
google.cloud.logging_v2.types import log_entry -from google.cloud.logging_v2.types import logging -from google.longrunning import operations_pb2 # type: ignore import google.api.monitored_resource_pb2 as monitored_resource_pb2 # type: ignore -from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO +from google.cloud.logging_v2.services.logging_service_v2 import pagers +from google.cloud.logging_v2.types import log_entry, logging +from google.longrunning import operations_pb2 # type: ignore + +from .transports.base import DEFAULT_CLIENT_INFO, LoggingServiceV2Transport from .transports.grpc import LoggingServiceV2GrpcTransport from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport @@ -65,13 +79,15 @@ class LoggingServiceV2ClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. """ + _transport_registry = OrderedDict() # type: Dict[str, Type[LoggingServiceV2Transport]] _transport_registry["grpc"] = LoggingServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = LoggingServiceV2GrpcAsyncIOTransport - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[LoggingServiceV2Transport]: + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[LoggingServiceV2Transport]: """Returns an appropriate transport class. Args: @@ -147,14 +163,16 @@ def _use_client_cert_effective(): bool: whether client certificate should be used for mTLS Raises: ValueError: (If using a version of google-auth without should_use_client_cert and - GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) 
""" # check if google-auth version supports should_use_client_cert for automatic mTLS enablement if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER return mtls.should_use_client_cert() - else: # pragma: NO COVER + else: # pragma: NO COVER # if unsupported, fallback to reading from env var - use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert_str = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() if use_client_cert_str not in ("true", "false"): raise ValueError( "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" @@ -193,8 +211,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: LoggingServiceV2Client: The constructed client. """ - credentials = service_account.Credentials.from_service_account_file( - filename) + credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials return cls(*args, **kwargs) @@ -211,73 +228,103 @@ def transport(self) -> LoggingServiceV2Transport: return self._transport @staticmethod - def log_path(project: str,log: str,) -> str: + def log_path( + project: str, + log: str, + ) -> str: """Returns a fully-qualified log string.""" - return "projects/{project}/logs/{log}".format(project=project, log=log, ) + return "projects/{project}/logs/{log}".format( + project=project, + log=log, + ) @staticmethod - def parse_log_path(path: str) -> Dict[str,str]: + def parse_log_path(path: str) -> Dict[str, str]: """Parses a log path into its component segments.""" m = re.match(r"^projects/(?P.+?)/logs/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + return 
"billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: + def parse_common_billing_account_path(path: str) -> Dict[str, str]: """Parse a billing_account path into its component segments.""" m = re.match(r"^billingAccounts/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str, ) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: + def parse_common_folder_path(path: str) -> Dict[str, str]: """Parse a folder path into its component segments.""" m = re.match(r"^folders/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str, ) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: + def parse_common_organization_path(path: str) -> Dict[str, str]: """Parse a organization path into its component segments.""" m = re.match(r"^organizations/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str, ) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) + return "projects/{project}".format( + project=project, + ) @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: + def parse_common_project_path(path: str) -> Dict[str, str]: """Parse a project 
path into its component segments.""" m = re.match(r"^projects/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str, ) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: + def parse_common_location_path(path: str) -> Dict[str, str]: """Parse a location path into its component segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -309,14 +356,18 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = LoggingServiceV2Client._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Figure out the client cert source to use. client_cert_source = None @@ -329,7 +380,9 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio # Figure out which api endpoint to use. if client_options.api_endpoint is not None: api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): api_endpoint = cls.DEFAULT_MTLS_ENDPOINT else: api_endpoint = cls.DEFAULT_ENDPOINT @@ -354,7 +407,9 @@ def _read_environment_variables(): use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) return use_client_cert, use_mtls_endpoint, universe_domain_env @staticmethod @@ -377,7 +432,9 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): return client_cert_source @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint) -> str: + def _get_api_endpoint( + 
api_override, client_cert_source, universe_domain, use_mtls_endpoint + ) -> str: """Return the API endpoint used by the client. Args: @@ -393,17 +450,25 @@ def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtl """ if api_override is not None: api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): _default_universe = LoggingServiceV2Client._DEFAULT_UNIVERSE if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) api_endpoint = LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + api_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) return api_endpoint @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: """Return the universe domain used by the client. Args: @@ -439,15 +504,18 @@ def _validate_universe_domain(self): return True def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError + self, error: core_exceptions.GoogleAPICallError ) -> None: """Adds credential info string to error details for 401/403/404 errors. Args: error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
""" - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: return cred = self._transport._credentials @@ -480,12 +548,18 @@ def universe_domain(self) -> str: """ return self._universe_domain - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, LoggingServiceV2Transport, Callable[..., LoggingServiceV2Transport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, LoggingServiceV2Transport, Callable[..., LoggingServiceV2Transport] + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the logging service v2 client. 
Args: @@ -540,13 +614,21 @@ def __init__(self, *, self._client_options = client_options_lib.from_dict(self._client_options) if self._client_options is None: self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = LoggingServiceV2Client._read_environment_variables() - self._client_cert_source = LoggingServiceV2Client._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = LoggingServiceV2Client._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ( + LoggingServiceV2Client._read_environment_variables() + ) + self._client_cert_source = LoggingServiceV2Client._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = LoggingServiceV2Client._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) self._api_endpoint: str = "" # updated below, depending on `transport` # Initialize the universe domain validation. @@ -558,7 +640,9 @@ def __init__(self, *, api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. 
# Ordinarily, we provide the transport, but allowing a custom transport @@ -567,30 +651,41 @@ def __init__(self, *, if transport_provided: # transport is a LoggingServiceV2Transport instance. if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) if self._client_options.scopes: raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." + "When providing a transport instance, provide its scopes directly." ) self._transport = cast(LoggingServiceV2Transport, transport) self._api_endpoint = self._transport.host - self._api_endpoint = (self._api_endpoint or - LoggingServiceV2Client._get_api_endpoint( + self._api_endpoint = ( + self._api_endpoint + or LoggingServiceV2Client._get_api_endpoint( self._client_options.api_endpoint, self._client_cert_source, self._universe_domain, - self._use_mtls_endpoint)) + self._use_mtls_endpoint, + ) + ) if not transport_provided: import google.auth._default # type: ignore - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) - transport_init: Union[Type[LoggingServiceV2Transport], Callable[..., LoggingServiceV2Transport]] = ( + transport_init: Union[ + Type[LoggingServiceV2Transport], + Callable[..., LoggingServiceV2Transport], + ] = ( LoggingServiceV2Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., LoggingServiceV2Transport], transport) @@ -609,28 +704,37 @@ def __init__(self, *, ) if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER _LOGGER.debug( "Created client `google.logging_v2.LoggingServiceV2Client`.", - extra = { + extra={ "serviceName": "google.logging.v2.LoggingServiceV2", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { "serviceName": "google.logging.v2.LoggingServiceV2", "credentialsType": None, - } + }, ) - def delete_log(self, - request: Optional[Union[logging.DeleteLogRequest, dict]] = None, - *, - log_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def delete_log( + self, + request: Optional[Union[logging.DeleteLogRequest, dict]] = None, + *, + log_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes all the log entries in a log for the \_Default Log Bucket. The log reappears if it receives new entries. Log entries written shortly before the delete operation might not be @@ -693,10 +797,14 @@ def sample_delete_log(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [log_name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -714,9 +822,7 @@ def sample_delete_log(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("log_name", request.log_name), - )), + gapic_v1.routing_header.to_grpc_metadata((("log_name", request.log_name),)), ) # Validate the universe domain. 
@@ -730,17 +836,18 @@ def sample_delete_log(): metadata=metadata, ) - def write_log_entries(self, - request: Optional[Union[logging.WriteLogEntriesRequest, dict]] = None, - *, - log_name: Optional[str] = None, - resource: Optional[monitored_resource_pb2.MonitoredResource] = None, - labels: Optional[MutableMapping[str, str]] = None, - entries: Optional[MutableSequence[log_entry.LogEntry]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging.WriteLogEntriesResponse: + def write_log_entries( + self, + request: Optional[Union[logging.WriteLogEntriesRequest, dict]] = None, + *, + log_name: Optional[str] = None, + resource: Optional[monitored_resource_pb2.MonitoredResource] = None, + labels: Optional[MutableMapping[str, str]] = None, + entries: Optional[MutableSequence[log_entry.LogEntry]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging.WriteLogEntriesResponse: r"""Writes log entries to Logging. This API method is the only way to send log entries to Logging. This method is used, directly or indirectly, by the Logging agent @@ -883,10 +990,14 @@ def sample_write_log_entries(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [log_name, resource, labels, entries] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -921,16 +1032,17 @@ def sample_write_log_entries(): # Done; return the response. return response - def list_log_entries(self, - request: Optional[Union[logging.ListLogEntriesRequest, dict]] = None, - *, - resource_names: Optional[MutableSequence[str]] = None, - filter: Optional[str] = None, - order_by: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLogEntriesPager: + def list_log_entries( + self, + request: Optional[Union[logging.ListLogEntriesRequest, dict]] = None, + *, + resource_names: Optional[MutableSequence[str]] = None, + filter: Optional[str] = None, + order_by: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLogEntriesPager: r"""Lists log entries. Use this method to retrieve log entries that originated from a project/folder/organization/billing account. For ways to export log entries, see `Exporting @@ -1033,10 +1145,14 @@ def sample_list_log_entries(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [resource_names, filter, order_by] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1080,13 +1196,16 @@ def sample_list_log_entries(): # Done; return the response. return response - def list_monitored_resource_descriptors(self, - request: Optional[Union[logging.ListMonitoredResourceDescriptorsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListMonitoredResourceDescriptorsPager: + def list_monitored_resource_descriptors( + self, + request: Optional[ + Union[logging.ListMonitoredResourceDescriptorsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListMonitoredResourceDescriptorsPager: r"""Lists the descriptors for monitored resource types used by Logging. @@ -1145,7 +1264,9 @@ def sample_list_monitored_resource_descriptors(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_monitored_resource_descriptors] + rpc = self._transport._wrapped_methods[ + self._transport.list_monitored_resource_descriptors + ] # Validate the universe domain. 
self._validate_universe_domain() @@ -1172,14 +1293,15 @@ def sample_list_monitored_resource_descriptors(): # Done; return the response. return response - def list_logs(self, - request: Optional[Union[logging.ListLogsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLogsPager: + def list_logs( + self, + request: Optional[Union[logging.ListLogsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLogsPager: r"""Lists the logs in projects, organizations, folders, or billing accounts. Only logs that have entries are listed. @@ -1246,10 +1368,14 @@ def sample_list_logs(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1267,9 +1393,7 @@ def sample_list_logs(): # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1297,13 +1421,14 @@ def sample_list_logs(): # Done; return the response. return response - def tail_log_entries(self, - requests: Optional[Iterator[logging.TailLogEntriesRequest]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> Iterable[logging.TailLogEntriesResponse]: + def tail_log_entries( + self, + requests: Optional[Iterator[logging.TailLogEntriesRequest]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> Iterable[logging.TailLogEntriesResponse]: r"""Streaming read of log entries as they are ingested. Until the stream is terminated, it will continue reading logs. @@ -1434,8 +1559,7 @@ def list_operations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -1444,7 +1568,11 @@ def list_operations( try: # Send the request. response = rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1494,8 +1622,7 @@ def get_operation( # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -1504,7 +1631,11 @@ def get_operation( try: # Send the request. response = rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1557,27 +1688,26 @@ def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc(request_pb, retry=retry, timeout=timeout, metadata=metadata,) - - - - - + rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "LoggingServiceV2Client", -) +__all__ = ("LoggingServiceV2Client",) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/pagers.py index 84028a18c37b..10d84af1bab6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -13,20 +13,33 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + from google.api_core import gapic_v1 from google.api_core import retry as retries from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore -from google.cloud.logging_v2.types import log_entry -from google.cloud.logging_v2.types import logging import google.api.monitored_resource_pb2 as monitored_resource_pb2 # type: ignore +from google.cloud.logging_v2.types import log_entry, logging class ListLogEntriesPager: @@ -46,14 +59,17 @@ class ListLogEntriesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., logging.ListLogEntriesResponse], - request: logging.ListLogEntriesRequest, - response: logging.ListLogEntriesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging.ListLogEntriesResponse], + request: logging.ListLogEntriesRequest, + response: logging.ListLogEntriesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. Args: @@ -86,7 +102,12 @@ def pages(self) -> Iterator[logging.ListLogEntriesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[log_entry.LogEntry]: @@ -94,7 +115,7 @@ def __iter__(self) -> Iterator[log_entry.LogEntry]: yield from page.entries def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListLogEntriesAsyncPager: @@ -114,14 +135,17 @@ class ListLogEntriesAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[logging.ListLogEntriesResponse]], - request: logging.ListLogEntriesRequest, - response: logging.ListLogEntriesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging.ListLogEntriesResponse]], + request: logging.ListLogEntriesRequest, + response: logging.ListLogEntriesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. Args: @@ -154,8 +178,14 @@ async def pages(self) -> AsyncIterator[logging.ListLogEntriesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[log_entry.LogEntry]: async def async_generator(): async for page in self.pages: @@ -165,7 +195,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListMonitoredResourceDescriptorsPager: @@ -185,14 +215,17 @@ class ListMonitoredResourceDescriptorsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., logging.ListMonitoredResourceDescriptorsResponse], - request: logging.ListMonitoredResourceDescriptorsRequest, - response: logging.ListMonitoredResourceDescriptorsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging.ListMonitoredResourceDescriptorsResponse], + request: logging.ListMonitoredResourceDescriptorsRequest, + response: logging.ListMonitoredResourceDescriptorsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. Args: @@ -225,7 +258,12 @@ def pages(self) -> Iterator[logging.ListMonitoredResourceDescriptorsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[monitored_resource_pb2.MonitoredResourceDescriptor]: @@ -233,7 +271,7 @@ def __iter__(self) -> Iterator[monitored_resource_pb2.MonitoredResourceDescripto yield from page.resource_descriptors def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListMonitoredResourceDescriptorsAsyncPager: @@ -253,14 +291,19 @@ class ListMonitoredResourceDescriptorsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[logging.ListMonitoredResourceDescriptorsResponse]], - request: logging.ListMonitoredResourceDescriptorsRequest, - response: logging.ListMonitoredResourceDescriptorsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[ + ..., Awaitable[logging.ListMonitoredResourceDescriptorsResponse] + ], + request: logging.ListMonitoredResourceDescriptorsRequest, + response: logging.ListMonitoredResourceDescriptorsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. Args: @@ -289,13 +332,23 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterator[logging.ListMonitoredResourceDescriptorsResponse]: + async def pages( + self, + ) -> AsyncIterator[logging.ListMonitoredResourceDescriptorsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response - def __aiter__(self) -> AsyncIterator[monitored_resource_pb2.MonitoredResourceDescriptor]: + + def __aiter__( + self, + ) -> AsyncIterator[monitored_resource_pb2.MonitoredResourceDescriptor]: async def async_generator(): async for page in self.pages: for response in page.resource_descriptors: @@ -304,7 +357,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return 
'{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListLogsPager: @@ -324,14 +377,17 @@ class ListLogsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., logging.ListLogsResponse], - request: logging.ListLogsRequest, - response: logging.ListLogsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging.ListLogsResponse], + request: logging.ListLogsRequest, + response: logging.ListLogsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. Args: @@ -364,7 +420,12 @@ def pages(self) -> Iterator[logging.ListLogsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[str]: @@ -372,7 +433,7 @@ def __iter__(self) -> Iterator[str]: yield from page.log_names def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListLogsAsyncPager: @@ -392,14 +453,17 @@ class ListLogsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[logging.ListLogsResponse]], - request: logging.ListLogsRequest, - response: logging.ListLogsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging.ListLogsResponse]], + request: logging.ListLogsRequest, + response: logging.ListLogsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. Args: @@ -432,8 +496,14 @@ async def pages(self) -> AsyncIterator[logging.ListLogsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[str]: async def async_generator(): async for page in self.pages: @@ -443,4 +513,4 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py index 25058513ec9e..880f7e966313 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py @@ -20,14 +20,13 @@ from .grpc import LoggingServiceV2GrpcTransport from .grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport - # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[LoggingServiceV2Transport]] -_transport_registry['grpc'] = LoggingServiceV2GrpcTransport -_transport_registry['grpc_asyncio'] = LoggingServiceV2GrpcAsyncIOTransport +_transport_registry["grpc"] = LoggingServiceV2GrpcTransport +_transport_registry["grpc_asyncio"] = LoggingServiceV2GrpcAsyncIOTransport __all__ = ( - 'LoggingServiceV2Transport', - 'LoggingServiceV2GrpcTransport', - 'LoggingServiceV2GrpcAsyncIOTransport', + "LoggingServiceV2Transport", + "LoggingServiceV2GrpcTransport", + "LoggingServiceV2GrpcAsyncIOTransport", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 3de330db3029..5e447e3011cf 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -16,22 +16,22 @@ import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -from google.cloud.logging_v2 import gapic_version as package_version - -import google.auth # type: ignore import google.api_core +import google.auth # type: ignore +import google.protobuf +import 
google.protobuf.empty_pb2 as empty_pb2 # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - +from google.cloud.logging_v2 import gapic_version as package_version from google.cloud.logging_v2.types import logging -from google.longrunning import operations_pb2 # type: ignore -import google.protobuf.empty_pb2 as empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ @@ -41,27 +41,28 @@ class LoggingServiceV2Transport(abc.ABC): """Abstract transport class for LoggingServiceV2.""" AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", ) - DEFAULT_HOST: str = 'logging.googleapis.com' + DEFAULT_HOST: str = "logging.googleapis.com" def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: 
Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: """Instantiate the transport. Args: @@ -100,31 +101,43 @@ def __init__( # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) if credentials_file is not None: credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - default_scopes=self.AUTH_SCOPES, - ) + credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(scopes=scopes, quota_project_id=quota_project_id, default_scopes=self.AUTH_SCOPES) + credentials, _ = google.auth.default( + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) # Don't apply audience if the credentials file passed from user. 
if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): credentials = credentials.with_always_use_jwt_access(True) # Save the credentials. self._credentials = credentials # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' + if ":" not in host: + host += ":443" self._host = host self._wrapped_methods: Dict[Callable, Callable] = {} @@ -247,69 +260,77 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - } + } def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
""" raise NotImplementedError() @property - def delete_log(self) -> Callable[ - [logging.DeleteLogRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def delete_log( + self, + ) -> Callable[ + [logging.DeleteLogRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] + ]: raise NotImplementedError() @property - def write_log_entries(self) -> Callable[ - [logging.WriteLogEntriesRequest], - Union[ - logging.WriteLogEntriesResponse, - Awaitable[logging.WriteLogEntriesResponse] - ]]: + def write_log_entries( + self, + ) -> Callable[ + [logging.WriteLogEntriesRequest], + Union[ + logging.WriteLogEntriesResponse, Awaitable[logging.WriteLogEntriesResponse] + ], + ]: raise NotImplementedError() @property - def list_log_entries(self) -> Callable[ - [logging.ListLogEntriesRequest], - Union[ - logging.ListLogEntriesResponse, - Awaitable[logging.ListLogEntriesResponse] - ]]: + def list_log_entries( + self, + ) -> Callable[ + [logging.ListLogEntriesRequest], + Union[ + logging.ListLogEntriesResponse, Awaitable[logging.ListLogEntriesResponse] + ], + ]: raise NotImplementedError() @property - def list_monitored_resource_descriptors(self) -> Callable[ - [logging.ListMonitoredResourceDescriptorsRequest], - Union[ - logging.ListMonitoredResourceDescriptorsResponse, - Awaitable[logging.ListMonitoredResourceDescriptorsResponse] - ]]: + def list_monitored_resource_descriptors( + self, + ) -> Callable[ + [logging.ListMonitoredResourceDescriptorsRequest], + Union[ + logging.ListMonitoredResourceDescriptorsResponse, + Awaitable[logging.ListMonitoredResourceDescriptorsResponse], + ], + ]: raise NotImplementedError() @property - def list_logs(self) -> Callable[ - [logging.ListLogsRequest], - Union[ - logging.ListLogsResponse, - Awaitable[logging.ListLogsResponse] - ]]: + def list_logs( + self, + ) -> Callable[ + [logging.ListLogsRequest], + Union[logging.ListLogsResponse, Awaitable[logging.ListLogsResponse]], + ]: raise NotImplementedError() @property - def 
tail_log_entries(self) -> Callable[ - [logging.TailLogEntriesRequest], - Union[ - logging.TailLogEntriesResponse, - Awaitable[logging.TailLogEntriesResponse] - ]]: + def tail_log_entries( + self, + ) -> Callable[ + [logging.TailLogEntriesRequest], + Union[ + logging.TailLogEntriesResponse, Awaitable[logging.TailLogEntriesResponse] + ], + ]: raise NotImplementedError() @property @@ -317,7 +338,10 @@ def list_operations( self, ) -> Callable[ [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], ]: raise NotImplementedError() @@ -344,6 +368,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ( - 'LoggingServiceV2Transport', -) +__all__ = ("LoggingServiceV2Transport",) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index a255116e4f26..10cef76171f6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -19,24 +19,23 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson +import google.auth # type: ignore +import google.protobuf.empty_pb2 as empty_pb2 
# type: ignore import google.protobuf.message - import grpc # type: ignore import proto # type: ignore - +from google.api_core import gapic_v1, grpc_helpers +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore from google.cloud.logging_v2.types import logging -from google.longrunning import operations_pb2 # type: ignore -import google.protobuf.empty_pb2 as empty_pb2 # type: ignore -from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson + +from .base import DEFAULT_CLIENT_INFO, LoggingServiceV2Transport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -46,7 +45,9 @@ class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) if logging_enabled: # pragma: NO COVER request_metadata = client_call_details.metadata if isinstance(request, proto.Message): @@ -67,7 +68,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.logging.v2.LoggingServiceV2", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -78,7 +79,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in 
response_metadata]) if response_metadata else None + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) result = response.result() if isinstance(result, proto.Message): response_payload = type(result).to_json(result) @@ -93,7 +98,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Received response for {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.logging.v2.LoggingServiceV2", "rpcName": client_call_details.method, "response": grpc_response, @@ -115,23 +120,26 @@ class LoggingServiceV2GrpcTransport(LoggingServiceV2Transport): It sends protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. """ + _stubs: Dict[str, Callable] - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = 
None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: @@ -258,19 +266,23 @@ def __init__(self, *, ) self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod - def create_channel(cls, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: + def create_channel( + cls, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: """Create and return a gRPC channel object. Args: host (Optional[str]): The host for the channel to use. @@ -306,19 +318,16 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. 
- """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property - def delete_log(self) -> Callable[ - [logging.DeleteLogRequest], - empty_pb2.Empty]: + def delete_log(self) -> Callable[[logging.DeleteLogRequest], empty_pb2.Empty]: r"""Return a callable for the delete log method over gRPC. Deletes all the log entries in a log for the \_Default Log @@ -337,18 +346,18 @@ def delete_log(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_log' not in self._stubs: - self._stubs['delete_log'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/DeleteLog', + if "delete_log" not in self._stubs: + self._stubs["delete_log"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/DeleteLog", request_serializer=logging.DeleteLogRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_log'] + return self._stubs["delete_log"] @property - def write_log_entries(self) -> Callable[ - [logging.WriteLogEntriesRequest], - logging.WriteLogEntriesResponse]: + def write_log_entries( + self, + ) -> Callable[[logging.WriteLogEntriesRequest], logging.WriteLogEntriesResponse]: r"""Return a callable for the write log entries method over gRPC. Writes log entries to Logging. This API method is the @@ -369,18 +378,18 @@ def write_log_entries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'write_log_entries' not in self._stubs: - self._stubs['write_log_entries'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/WriteLogEntries', + if "write_log_entries" not in self._stubs: + self._stubs["write_log_entries"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/WriteLogEntries", request_serializer=logging.WriteLogEntriesRequest.serialize, response_deserializer=logging.WriteLogEntriesResponse.deserialize, ) - return self._stubs['write_log_entries'] + return self._stubs["write_log_entries"] @property - def list_log_entries(self) -> Callable[ - [logging.ListLogEntriesRequest], - logging.ListLogEntriesResponse]: + def list_log_entries( + self, + ) -> Callable[[logging.ListLogEntriesRequest], logging.ListLogEntriesResponse]: r"""Return a callable for the list log entries method over gRPC. Lists log entries. Use this method to retrieve log entries that @@ -398,18 +407,21 @@ def list_log_entries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_log_entries' not in self._stubs: - self._stubs['list_log_entries'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListLogEntries', + if "list_log_entries" not in self._stubs: + self._stubs["list_log_entries"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListLogEntries", request_serializer=logging.ListLogEntriesRequest.serialize, response_deserializer=logging.ListLogEntriesResponse.deserialize, ) - return self._stubs['list_log_entries'] + return self._stubs["list_log_entries"] @property - def list_monitored_resource_descriptors(self) -> Callable[ - [logging.ListMonitoredResourceDescriptorsRequest], - logging.ListMonitoredResourceDescriptorsResponse]: + def list_monitored_resource_descriptors( + self, + ) -> Callable[ + [logging.ListMonitoredResourceDescriptorsRequest], + logging.ListMonitoredResourceDescriptorsResponse, + ]: r"""Return a callable for the list monitored resource descriptors method over gRPC. @@ -426,18 +438,20 @@ def list_monitored_resource_descriptors(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_monitored_resource_descriptors' not in self._stubs: - self._stubs['list_monitored_resource_descriptors'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors', - request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize, - response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize, + if "list_monitored_resource_descriptors" not in self._stubs: + self._stubs["list_monitored_resource_descriptors"] = ( + self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors", + request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize, + response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize, + ) ) - return self._stubs['list_monitored_resource_descriptors'] + return self._stubs["list_monitored_resource_descriptors"] @property - def list_logs(self) -> Callable[ - [logging.ListLogsRequest], - logging.ListLogsResponse]: + def list_logs( + self, + ) -> Callable[[logging.ListLogsRequest], logging.ListLogsResponse]: r"""Return a callable for the list logs method over gRPC. Lists the logs in projects, organizations, folders, @@ -454,18 +468,18 @@ def list_logs(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_logs' not in self._stubs: - self._stubs['list_logs'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListLogs', + if "list_logs" not in self._stubs: + self._stubs["list_logs"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListLogs", request_serializer=logging.ListLogsRequest.serialize, response_deserializer=logging.ListLogsResponse.deserialize, ) - return self._stubs['list_logs'] + return self._stubs["list_logs"] @property - def tail_log_entries(self) -> Callable[ - [logging.TailLogEntriesRequest], - logging.TailLogEntriesResponse]: + def tail_log_entries( + self, + ) -> Callable[[logging.TailLogEntriesRequest], logging.TailLogEntriesResponse]: r"""Return a callable for the tail log entries method over gRPC. Streaming read of log entries as they are ingested. @@ -482,13 +496,13 @@ def tail_log_entries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'tail_log_entries' not in self._stubs: - self._stubs['tail_log_entries'] = self._logged_channel.stream_stream( - '/google.logging.v2.LoggingServiceV2/TailLogEntries', + if "tail_log_entries" not in self._stubs: + self._stubs["tail_log_entries"] = self._logged_channel.stream_stream( + "/google.logging.v2.LoggingServiceV2/TailLogEntries", request_serializer=logging.TailLogEntriesRequest.serialize, response_deserializer=logging.TailLogEntriesResponse.deserialize, ) - return self._stubs['tail_log_entries'] + return self._stubs["tail_log_entries"] def close(self): self._logged_channel.close() @@ -497,8 +511,7 @@ def close(self): def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
# gRPC handles serialization and deserialization, so we just need @@ -515,8 +528,7 @@ def cancel_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -532,9 +544,10 @@ def get_operation( @property def list_operations( self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -552,6 +565,4 @@ def kind(self) -> str: return "grpc" -__all__ = ( - 'LoggingServiceV2GrpcTransport', -) +__all__ = ("LoggingServiceV2GrpcTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 50ae005fe396..474407c56bd1 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -15,32 +15,31 @@ # import inspect import json -import pickle import logging as std_logging +import pickle import warnings from 
typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.logging_v2.types import logging +from google.longrunning import operations_pb2 # type: ignore from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore from grpc.experimental import aio # type: ignore -from google.cloud.logging_v2.types import logging -from google.longrunning import operations_pb2 # type: ignore -import google.protobuf.empty_pb2 as empty_pb2 # type: ignore -from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO +from .base import DEFAULT_CLIENT_INFO, LoggingServiceV2Transport from .grpc import LoggingServiceV2GrpcTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -48,9 +47,13 @@ _LOGGER = std_logging.getLogger(__name__) -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + logging_enabled = 
CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) if logging_enabled: # pragma: NO COVER request_metadata = client_call_details.metadata if isinstance(request, proto.Message): @@ -71,7 +74,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.logging.v2.LoggingServiceV2", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -82,7 +85,11 @@ async def intercept_unary_unary(self, continuation, client_call_details, request if logging_enabled: # pragma: NO COVER response_metadata = await response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) result = await response if isinstance(result, proto.Message): response_payload = type(result).to_json(result) @@ -97,7 +104,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Received response to rpc {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.logging.v2.LoggingServiceV2", "rpcName": str(client_call_details.method), "response": grpc_response, @@ -124,13 +131,15 @@ class LoggingServiceV2GrpcAsyncIOTransport(LoggingServiceV2Transport): _stubs: Dict[str, Callable] = {} @classmethod - def create_channel(cls, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: + def create_channel( + cls, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: 
Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: host (Optional[str]): The host for the channel to use. @@ -161,24 +170,26 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. 
Args: @@ -307,7 +318,9 @@ def __init__(self, *, self._interceptor = _LoggingClientAIOInterceptor() self._grpc_channel._unary_unary_interceptors.append(self._interceptor) self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @@ -322,9 +335,9 @@ def grpc_channel(self) -> aio.Channel: return self._grpc_channel @property - def delete_log(self) -> Callable[ - [logging.DeleteLogRequest], - Awaitable[empty_pb2.Empty]]: + def delete_log( + self, + ) -> Callable[[logging.DeleteLogRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete log method over gRPC. Deletes all the log entries in a log for the \_Default Log @@ -343,18 +356,20 @@ def delete_log(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_log' not in self._stubs: - self._stubs['delete_log'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/DeleteLog', + if "delete_log" not in self._stubs: + self._stubs["delete_log"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/DeleteLog", request_serializer=logging.DeleteLogRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_log'] + return self._stubs["delete_log"] @property - def write_log_entries(self) -> Callable[ - [logging.WriteLogEntriesRequest], - Awaitable[logging.WriteLogEntriesResponse]]: + def write_log_entries( + self, + ) -> Callable[ + [logging.WriteLogEntriesRequest], Awaitable[logging.WriteLogEntriesResponse] + ]: r"""Return a callable for the write log entries method over gRPC. Writes log entries to Logging. 
This API method is the @@ -375,18 +390,20 @@ def write_log_entries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'write_log_entries' not in self._stubs: - self._stubs['write_log_entries'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/WriteLogEntries', + if "write_log_entries" not in self._stubs: + self._stubs["write_log_entries"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/WriteLogEntries", request_serializer=logging.WriteLogEntriesRequest.serialize, response_deserializer=logging.WriteLogEntriesResponse.deserialize, ) - return self._stubs['write_log_entries'] + return self._stubs["write_log_entries"] @property - def list_log_entries(self) -> Callable[ - [logging.ListLogEntriesRequest], - Awaitable[logging.ListLogEntriesResponse]]: + def list_log_entries( + self, + ) -> Callable[ + [logging.ListLogEntriesRequest], Awaitable[logging.ListLogEntriesResponse] + ]: r"""Return a callable for the list log entries method over gRPC. Lists log entries. Use this method to retrieve log entries that @@ -404,18 +421,21 @@ def list_log_entries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_log_entries' not in self._stubs: - self._stubs['list_log_entries'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListLogEntries', + if "list_log_entries" not in self._stubs: + self._stubs["list_log_entries"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListLogEntries", request_serializer=logging.ListLogEntriesRequest.serialize, response_deserializer=logging.ListLogEntriesResponse.deserialize, ) - return self._stubs['list_log_entries'] + return self._stubs["list_log_entries"] @property - def list_monitored_resource_descriptors(self) -> Callable[ - [logging.ListMonitoredResourceDescriptorsRequest], - Awaitable[logging.ListMonitoredResourceDescriptorsResponse]]: + def list_monitored_resource_descriptors( + self, + ) -> Callable[ + [logging.ListMonitoredResourceDescriptorsRequest], + Awaitable[logging.ListMonitoredResourceDescriptorsResponse], + ]: r"""Return a callable for the list monitored resource descriptors method over gRPC. @@ -432,18 +452,20 @@ def list_monitored_resource_descriptors(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_monitored_resource_descriptors' not in self._stubs: - self._stubs['list_monitored_resource_descriptors'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors', - request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize, - response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize, + if "list_monitored_resource_descriptors" not in self._stubs: + self._stubs["list_monitored_resource_descriptors"] = ( + self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors", + request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize, + response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize, + ) ) - return self._stubs['list_monitored_resource_descriptors'] + return self._stubs["list_monitored_resource_descriptors"] @property - def list_logs(self) -> Callable[ - [logging.ListLogsRequest], - Awaitable[logging.ListLogsResponse]]: + def list_logs( + self, + ) -> Callable[[logging.ListLogsRequest], Awaitable[logging.ListLogsResponse]]: r"""Return a callable for the list logs method over gRPC. Lists the logs in projects, organizations, folders, @@ -460,18 +482,20 @@ def list_logs(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_logs' not in self._stubs: - self._stubs['list_logs'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListLogs', + if "list_logs" not in self._stubs: + self._stubs["list_logs"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListLogs", request_serializer=logging.ListLogsRequest.serialize, response_deserializer=logging.ListLogsResponse.deserialize, ) - return self._stubs['list_logs'] + return self._stubs["list_logs"] @property - def tail_log_entries(self) -> Callable[ - [logging.TailLogEntriesRequest], - Awaitable[logging.TailLogEntriesResponse]]: + def tail_log_entries( + self, + ) -> Callable[ + [logging.TailLogEntriesRequest], Awaitable[logging.TailLogEntriesResponse] + ]: r"""Return a callable for the tail log entries method over gRPC. Streaming read of log entries as they are ingested. @@ -488,16 +512,16 @@ def tail_log_entries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'tail_log_entries' not in self._stubs: - self._stubs['tail_log_entries'] = self._logged_channel.stream_stream( - '/google.logging.v2.LoggingServiceV2/TailLogEntries', + if "tail_log_entries" not in self._stubs: + self._stubs["tail_log_entries"] = self._logged_channel.stream_stream( + "/google.logging.v2.LoggingServiceV2/TailLogEntries", request_serializer=logging.TailLogEntriesRequest.serialize, response_deserializer=logging.TailLogEntriesResponse.deserialize, ) - return self._stubs['tail_log_entries'] + return self._stubs["tail_log_entries"] def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { self.delete_log: self._wrap_method( self.delete_log, @@ -628,8 +652,7 @@ def kind(self) -> str: def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -646,8 +669,7 @@ def cancel_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -663,9 +685,10 @@ def get_operation( @property def list_operations( self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. 
- """ + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -679,6 +702,4 @@ def list_operations( return self._stubs["list_operations"] -__all__ = ( - 'LoggingServiceV2GrpcAsyncIOTransport', -) +__all__ = ("LoggingServiceV2GrpcAsyncIOTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/__init__.py index 27ad0c062212..43eb84d6b974 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/__init__.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .client import BaseMetricsServiceV2Client from .async_client import BaseMetricsServiceV2AsyncClient +from .client import BaseMetricsServiceV2Client __all__ = ( - 'BaseMetricsServiceV2Client', - 'BaseMetricsServiceV2AsyncClient', + "BaseMetricsServiceV2Client", + "BaseMetricsServiceV2AsyncClient", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 68459a35c2df..3666b857ced4 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -14,44 +14,56 @@ # limitations under the License. # import logging as std_logging -from collections import OrderedDict import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.logging_v2 import gapic_version as package_version +from collections import OrderedDict +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) -from google.api_core.client_options import ClientOptions +import google.protobuf from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.logging_v2 import gapic_version as package_version 
+from google.oauth2 import service_account # type: ignore try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore -from google.cloud.logging_v2.services.metrics_service_v2 import pagers -from google.cloud.logging_v2.types import logging_metrics -from google.longrunning import operations_pb2 # type: ignore import google.api.distribution_pb2 as distribution_pb2 # type: ignore import google.api.metric_pb2 as metric_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore -from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport +from google.cloud.logging_v2.services.metrics_service_v2 import pagers +from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore + from .client import BaseMetricsServiceV2Client +from .transports.base import DEFAULT_CLIENT_INFO, MetricsServiceV2Transport +from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) + class BaseMetricsServiceV2AsyncClient: """Service for configuring logs-based metrics.""" @@ -65,17 +77,33 @@ class BaseMetricsServiceV2AsyncClient: _DEFAULT_UNIVERSE = BaseMetricsServiceV2Client._DEFAULT_UNIVERSE log_metric_path = staticmethod(BaseMetricsServiceV2Client.log_metric_path) - parse_log_metric_path = staticmethod(BaseMetricsServiceV2Client.parse_log_metric_path) - common_billing_account_path = staticmethod(BaseMetricsServiceV2Client.common_billing_account_path) - parse_common_billing_account_path = 
staticmethod(BaseMetricsServiceV2Client.parse_common_billing_account_path) + parse_log_metric_path = staticmethod( + BaseMetricsServiceV2Client.parse_log_metric_path + ) + common_billing_account_path = staticmethod( + BaseMetricsServiceV2Client.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + BaseMetricsServiceV2Client.parse_common_billing_account_path + ) common_folder_path = staticmethod(BaseMetricsServiceV2Client.common_folder_path) - parse_common_folder_path = staticmethod(BaseMetricsServiceV2Client.parse_common_folder_path) - common_organization_path = staticmethod(BaseMetricsServiceV2Client.common_organization_path) - parse_common_organization_path = staticmethod(BaseMetricsServiceV2Client.parse_common_organization_path) + parse_common_folder_path = staticmethod( + BaseMetricsServiceV2Client.parse_common_folder_path + ) + common_organization_path = staticmethod( + BaseMetricsServiceV2Client.common_organization_path + ) + parse_common_organization_path = staticmethod( + BaseMetricsServiceV2Client.parse_common_organization_path + ) common_project_path = staticmethod(BaseMetricsServiceV2Client.common_project_path) - parse_common_project_path = staticmethod(BaseMetricsServiceV2Client.parse_common_project_path) + parse_common_project_path = staticmethod( + BaseMetricsServiceV2Client.parse_common_project_path + ) common_location_path = staticmethod(BaseMetricsServiceV2Client.common_location_path) - parse_common_location_path = staticmethod(BaseMetricsServiceV2Client.parse_common_location_path) + parse_common_location_path = staticmethod( + BaseMetricsServiceV2Client.parse_common_location_path + ) @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): @@ -117,7 +145,9 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + def 
get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): """Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -148,7 +178,9 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOption Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - return BaseMetricsServiceV2Client.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + return BaseMetricsServiceV2Client.get_mtls_endpoint_and_cert_source( + client_options + ) # type: ignore @property def transport(self) -> MetricsServiceV2Transport: @@ -180,12 +212,18 @@ def universe_domain(self) -> str: get_transport_class = BaseMetricsServiceV2Client.get_transport_class - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MetricsServiceV2Transport, Callable[..., MetricsServiceV2Transport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, MetricsServiceV2Transport, Callable[..., MetricsServiceV2Transport] + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the base metrics service v2 async client. 
Args: @@ -240,31 +278,39 @@ def __init__(self, *, transport=transport, client_options=client_options, client_info=client_info, - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER _LOGGER.debug( "Created client `google.logging_v2.BaseMetricsServiceV2AsyncClient`.", - extra = { + extra={ "serviceName": "google.logging.v2.MetricsServiceV2", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { "serviceName": "google.logging.v2.MetricsServiceV2", "credentialsType": None, - } + }, ) - async def _list_log_metrics(self, - request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLogMetricsAsyncPager: + async def _list_log_metrics( + self, + request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLogMetricsAsyncPager: r"""Lists logs-based metrics. .. 
code-block:: python @@ -329,10 +375,14 @@ async def sample_list_log_metrics(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -346,14 +396,14 @@ async def sample_list_log_metrics(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_log_metrics] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_log_metrics + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -381,14 +431,15 @@ async def sample_list_log_metrics(): # Done; return the response. 
return response - async def _get_log_metric(self, - request: Optional[Union[logging_metrics.GetLogMetricRequest, dict]] = None, - *, - metric_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_metrics.LogMetric: + async def _get_log_metric( + self, + request: Optional[Union[logging_metrics.GetLogMetricRequest, dict]] = None, + *, + metric_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_metrics.LogMetric: r"""Gets a logs-based metric. .. code-block:: python @@ -458,10 +509,14 @@ async def sample_get_log_metric(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [metric_name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -475,14 +530,16 @@ async def sample_get_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.get_log_metric] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_log_metric + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("metric_name", request.metric_name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("metric_name", request.metric_name),) + ), ) # Validate the universe domain. @@ -499,15 +556,16 @@ async def sample_get_log_metric(): # Done; return the response. return response - async def _create_log_metric(self, - request: Optional[Union[logging_metrics.CreateLogMetricRequest, dict]] = None, - *, - parent: Optional[str] = None, - metric: Optional[logging_metrics.LogMetric] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_metrics.LogMetric: + async def _create_log_metric( + self, + request: Optional[Union[logging_metrics.CreateLogMetricRequest, dict]] = None, + *, + parent: Optional[str] = None, + metric: Optional[logging_metrics.LogMetric] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_metrics.LogMetric: r"""Creates a logs-based metric. .. code-block:: python @@ -593,10 +651,14 @@ async def sample_create_log_metric(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [parent, metric] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -612,14 +674,14 @@ async def sample_create_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_log_metric] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_log_metric + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -636,15 +698,16 @@ async def sample_create_log_metric(): # Done; return the response. 
return response - async def _update_log_metric(self, - request: Optional[Union[logging_metrics.UpdateLogMetricRequest, dict]] = None, - *, - metric_name: Optional[str] = None, - metric: Optional[logging_metrics.LogMetric] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_metrics.LogMetric: + async def _update_log_metric( + self, + request: Optional[Union[logging_metrics.UpdateLogMetricRequest, dict]] = None, + *, + metric_name: Optional[str] = None, + metric: Optional[logging_metrics.LogMetric] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_metrics.LogMetric: r"""Creates or updates a logs-based metric. .. code-block:: python @@ -729,10 +792,14 @@ async def sample_update_log_metric(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [metric_name, metric] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -748,14 +815,16 @@ async def sample_update_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.update_log_metric] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_log_metric + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("metric_name", request.metric_name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("metric_name", request.metric_name),) + ), ) # Validate the universe domain. @@ -772,14 +841,15 @@ async def sample_update_log_metric(): # Done; return the response. return response - async def _delete_log_metric(self, - request: Optional[Union[logging_metrics.DeleteLogMetricRequest, dict]] = None, - *, - metric_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def _delete_log_metric( + self, + request: Optional[Union[logging_metrics.DeleteLogMetricRequest, dict]] = None, + *, + metric_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a logs-based metric. .. code-block:: python @@ -830,10 +900,14 @@ async def sample_delete_log_metric(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [metric_name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -847,14 +921,16 @@ async def sample_delete_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_log_metric] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_log_metric + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("metric_name", request.metric_name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("metric_name", request.metric_name),) + ), ) # Validate the universe domain. @@ -910,8 +986,7 @@ async def list_operations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -919,7 +994,11 @@ async def list_operations( # Send the request. response = await rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -966,8 +1045,7 @@ async def get_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -975,7 +1053,11 @@ async def get_operation( # Send the request. response = await rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1025,15 +1107,19 @@ async def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. 
- await rpc(request_pb, retry=retry, timeout=timeout, metadata=metadata,) + await rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) async def __aenter__(self) -> "BaseMetricsServiceV2AsyncClient": return self @@ -1041,12 +1127,13 @@ async def __aenter__(self) -> "BaseMetricsServiceV2AsyncClient": async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "BaseMetricsServiceV2AsyncClient", -) +__all__ = ("BaseMetricsServiceV2AsyncClient",) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py index c83b00ecb06f..75d8a010df8b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -13,27 +13,38 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from collections import OrderedDict -from http import HTTPStatus import json import logging as std_logging import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast import warnings +from collections import OrderedDict +from http import HTTPStatus +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) -from google.cloud.logging_v2 import gapic_version as package_version - +import google.protobuf from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.logging_v2 import gapic_version as package_version +from google.oauth2 import service_account # type: ignore try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -42,19 +53,21 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) -from google.cloud.logging_v2.services.metrics_service_v2 import pagers -from google.cloud.logging_v2.types import logging_metrics -from 
google.longrunning import operations_pb2 # type: ignore import google.api.distribution_pb2 as distribution_pb2 # type: ignore import google.api.metric_pb2 as metric_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore -from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO +from google.cloud.logging_v2.services.metrics_service_v2 import pagers +from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore + +from .transports.base import DEFAULT_CLIENT_INFO, MetricsServiceV2Transport from .transports.grpc import MetricsServiceV2GrpcTransport from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport @@ -66,13 +79,15 @@ class BaseMetricsServiceV2ClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. """ + _transport_registry = OrderedDict() # type: Dict[str, Type[MetricsServiceV2Transport]] _transport_registry["grpc"] = MetricsServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = MetricsServiceV2GrpcAsyncIOTransport - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[MetricsServiceV2Transport]: + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[MetricsServiceV2Transport]: """Returns an appropriate transport class. Args: @@ -148,14 +163,16 @@ def _use_client_cert_effective(): bool: whether client certificate should be used for mTLS Raises: ValueError: (If using a version of google-auth without should_use_client_cert and - GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) 
""" # check if google-auth version supports should_use_client_cert for automatic mTLS enablement if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER return mtls.should_use_client_cert() - else: # pragma: NO COVER + else: # pragma: NO COVER # if unsupported, fallback to reading from env var - use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert_str = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() if use_client_cert_str not in ("true", "false"): raise ValueError( "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" @@ -194,8 +211,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: BaseMetricsServiceV2Client: The constructed client. """ - credentials = service_account.Credentials.from_service_account_file( - filename) + credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials return cls(*args, **kwargs) @@ -212,73 +228,103 @@ def transport(self) -> MetricsServiceV2Transport: return self._transport @staticmethod - def log_metric_path(project: str,metric: str,) -> str: + def log_metric_path( + project: str, + metric: str, + ) -> str: """Returns a fully-qualified log_metric string.""" - return "projects/{project}/metrics/{metric}".format(project=project, metric=metric, ) + return "projects/{project}/metrics/{metric}".format( + project=project, + metric=metric, + ) @staticmethod - def parse_log_metric_path(path: str) -> Dict[str,str]: + def parse_log_metric_path(path: str) -> Dict[str, str]: """Parses a log_metric path into its component segments.""" m = re.match(r"^projects/(?P.+?)/metrics/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" - return 
"billingAccounts/{billing_account}".format(billing_account=billing_account, ) + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: + def parse_common_billing_account_path(path: str) -> Dict[str, str]: """Parse a billing_account path into its component segments.""" m = re.match(r"^billingAccounts/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str, ) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: + def parse_common_folder_path(path: str) -> Dict[str, str]: """Parse a folder path into its component segments.""" m = re.match(r"^folders/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str, ) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: + def parse_common_organization_path(path: str) -> Dict[str, str]: """Parse a organization path into its component segments.""" m = re.match(r"^organizations/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str, ) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) + return "projects/{project}".format( + project=project, + ) @staticmethod - def parse_common_project_path(path: str) -> 
Dict[str,str]: + def parse_common_project_path(path: str) -> Dict[str, str]: """Parse a project path into its component segments.""" m = re.match(r"^projects/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str, ) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: + def parse_common_location_path(path: str) -> Dict[str, str]: """Parse a location path into its component segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -310,14 +356,18 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = BaseMetricsServiceV2Client._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Figure out the client cert source to use. client_cert_source = None @@ -330,7 +380,9 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio # Figure out which api endpoint to use. if client_options.api_endpoint is not None: api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): api_endpoint = cls.DEFAULT_MTLS_ENDPOINT else: api_endpoint = cls.DEFAULT_ENDPOINT @@ -355,7 +407,9 @@ def _read_environment_variables(): use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) return use_client_cert, use_mtls_endpoint, universe_domain_env @staticmethod @@ -378,7 +432,9 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): return client_cert_source @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint) -> str: + def 
_get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ) -> str: """Return the API endpoint used by the client. Args: @@ -394,17 +450,25 @@ def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtl """ if api_override is not None: api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): _default_universe = BaseMetricsServiceV2Client._DEFAULT_UNIVERSE if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) api_endpoint = BaseMetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = BaseMetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + api_endpoint = BaseMetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) return api_endpoint @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: """Return the universe domain used by the client. Args: @@ -440,15 +504,18 @@ def _validate_universe_domain(self): return True def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError + self, error: core_exceptions.GoogleAPICallError ) -> None: """Adds credential info string to error details for 401/403/404 errors. Args: error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
""" - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: return cred = self._transport._credentials @@ -481,12 +548,18 @@ def universe_domain(self) -> str: """ return self._universe_domain - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MetricsServiceV2Transport, Callable[..., MetricsServiceV2Transport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, MetricsServiceV2Transport, Callable[..., MetricsServiceV2Transport] + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the base metrics service v2 client. 
Args: @@ -541,13 +614,21 @@ def __init__(self, *, self._client_options = client_options_lib.from_dict(self._client_options) if self._client_options is None: self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = BaseMetricsServiceV2Client._read_environment_variables() - self._client_cert_source = BaseMetricsServiceV2Client._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = BaseMetricsServiceV2Client._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ( + BaseMetricsServiceV2Client._read_environment_variables() + ) + self._client_cert_source = BaseMetricsServiceV2Client._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = BaseMetricsServiceV2Client._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) self._api_endpoint: str = "" # updated below, depending on `transport` # Initialize the universe domain validation. @@ -559,7 +640,9 @@ def __init__(self, *, api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. 
# Ordinarily, we provide the transport, but allowing a custom transport @@ -568,30 +651,41 @@ def __init__(self, *, if transport_provided: # transport is a MetricsServiceV2Transport instance. if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) if self._client_options.scopes: raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." + "When providing a transport instance, provide its scopes directly." ) self._transport = cast(MetricsServiceV2Transport, transport) self._api_endpoint = self._transport.host - self._api_endpoint = (self._api_endpoint or - BaseMetricsServiceV2Client._get_api_endpoint( + self._api_endpoint = ( + self._api_endpoint + or BaseMetricsServiceV2Client._get_api_endpoint( self._client_options.api_endpoint, self._client_cert_source, self._universe_domain, - self._use_mtls_endpoint)) + self._use_mtls_endpoint, + ) + ) if not transport_provided: import google.auth._default # type: ignore - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) - transport_init: Union[Type[MetricsServiceV2Transport], Callable[..., MetricsServiceV2Transport]] = ( + transport_init: Union[ + Type[MetricsServiceV2Transport], + Callable[..., MetricsServiceV2Transport], + ] = ( BaseMetricsServiceV2Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., MetricsServiceV2Transport], transport) @@ -610,28 +704,37 @@ def __init__(self, *, ) if "async" not in str(self._transport): - if 
CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER _LOGGER.debug( "Created client `google.logging_v2.BaseMetricsServiceV2Client`.", - extra = { + extra={ "serviceName": "google.logging.v2.MetricsServiceV2", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { "serviceName": "google.logging.v2.MetricsServiceV2", "credentialsType": None, - } + }, ) - def _list_log_metrics(self, - request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLogMetricsPager: + def _list_log_metrics( + self, + request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLogMetricsPager: r"""Lists logs-based metrics. .. code-block:: python @@ -696,10 +799,14 @@ def sample_list_log_metrics(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -717,9 +824,7 @@ def sample_list_log_metrics(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -747,14 +852,15 @@ def sample_list_log_metrics(): # Done; return the response. return response - def _get_log_metric(self, - request: Optional[Union[logging_metrics.GetLogMetricRequest, dict]] = None, - *, - metric_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_metrics.LogMetric: + def _get_log_metric( + self, + request: Optional[Union[logging_metrics.GetLogMetricRequest, dict]] = None, + *, + metric_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_metrics.LogMetric: r"""Gets a logs-based metric. .. 
code-block:: python @@ -824,10 +930,14 @@ def sample_get_log_metric(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [metric_name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -845,9 +955,9 @@ def sample_get_log_metric(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("metric_name", request.metric_name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("metric_name", request.metric_name),) + ), ) # Validate the universe domain. @@ -864,15 +974,16 @@ def sample_get_log_metric(): # Done; return the response. 
return response - def _create_log_metric(self, - request: Optional[Union[logging_metrics.CreateLogMetricRequest, dict]] = None, - *, - parent: Optional[str] = None, - metric: Optional[logging_metrics.LogMetric] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_metrics.LogMetric: + def _create_log_metric( + self, + request: Optional[Union[logging_metrics.CreateLogMetricRequest, dict]] = None, + *, + parent: Optional[str] = None, + metric: Optional[logging_metrics.LogMetric] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_metrics.LogMetric: r"""Creates a logs-based metric. .. code-block:: python @@ -958,10 +1069,14 @@ def sample_create_log_metric(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent, metric] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -981,9 +1096,7 @@ def sample_create_log_metric(): # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1000,15 +1113,16 @@ def sample_create_log_metric(): # Done; return the response. return response - def _update_log_metric(self, - request: Optional[Union[logging_metrics.UpdateLogMetricRequest, dict]] = None, - *, - metric_name: Optional[str] = None, - metric: Optional[logging_metrics.LogMetric] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_metrics.LogMetric: + def _update_log_metric( + self, + request: Optional[Union[logging_metrics.UpdateLogMetricRequest, dict]] = None, + *, + metric_name: Optional[str] = None, + metric: Optional[logging_metrics.LogMetric] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_metrics.LogMetric: r"""Creates or updates a logs-based metric. .. code-block:: python @@ -1093,10 +1207,14 @@ def sample_update_log_metric(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [metric_name, metric] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1116,9 +1234,9 @@ def sample_update_log_metric(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("metric_name", request.metric_name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("metric_name", request.metric_name),) + ), ) # Validate the universe domain. @@ -1135,14 +1253,15 @@ def sample_update_log_metric(): # Done; return the response. return response - def _delete_log_metric(self, - request: Optional[Union[logging_metrics.DeleteLogMetricRequest, dict]] = None, - *, - metric_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def _delete_log_metric( + self, + request: Optional[Union[logging_metrics.DeleteLogMetricRequest, dict]] = None, + *, + metric_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a logs-based metric. .. code-block:: python @@ -1193,10 +1312,14 @@ def sample_delete_log_metric(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [metric_name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1214,9 +1337,9 @@ def sample_delete_log_metric(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("metric_name", request.metric_name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("metric_name", request.metric_name),) + ), ) # Validate the universe domain. @@ -1285,8 +1408,7 @@ def list_operations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -1295,7 +1417,11 @@ def list_operations( try: # Send the request. response = rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1345,8 +1471,7 @@ def get_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. 
@@ -1355,7 +1480,11 @@ def get_operation( try: # Send the request. response = rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1408,27 +1537,26 @@ def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc(request_pb, retry=retry, timeout=timeout, metadata=metadata,) - - - - - + rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "BaseMetricsServiceV2Client", -) +__all__ = ("BaseMetricsServiceV2Client",) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index 3d44cf6e4c67..e296aebedbf8 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -13,13 +13,27 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + from google.api_core import gapic_v1 from google.api_core import retry as retries from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore @@ -44,14 +58,17 @@ class ListLogMetricsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., logging_metrics.ListLogMetricsResponse], - request: logging_metrics.ListLogMetricsRequest, - response: logging_metrics.ListLogMetricsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging_metrics.ListLogMetricsResponse], + request: logging_metrics.ListLogMetricsRequest, + response: logging_metrics.ListLogMetricsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. 
Args: @@ -84,7 +101,12 @@ def pages(self) -> Iterator[logging_metrics.ListLogMetricsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[logging_metrics.LogMetric]: @@ -92,7 +114,7 @@ def __iter__(self) -> Iterator[logging_metrics.LogMetric]: yield from page.metrics def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListLogMetricsAsyncPager: @@ -112,14 +134,17 @@ class ListLogMetricsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[logging_metrics.ListLogMetricsResponse]], - request: logging_metrics.ListLogMetricsRequest, - response: logging_metrics.ListLogMetricsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging_metrics.ListLogMetricsResponse]], + request: logging_metrics.ListLogMetricsRequest, + response: logging_metrics.ListLogMetricsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. 
Args: @@ -152,8 +177,14 @@ async def pages(self) -> AsyncIterator[logging_metrics.ListLogMetricsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[logging_metrics.LogMetric]: async def async_generator(): async for page in self.pages: @@ -163,4 +194,4 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py index a6eb39e80fa0..10e38acd8596 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py @@ -20,14 +20,13 @@ from .grpc import MetricsServiceV2GrpcTransport from .grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport - # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[MetricsServiceV2Transport]] -_transport_registry['grpc'] = MetricsServiceV2GrpcTransport -_transport_registry['grpc_asyncio'] = MetricsServiceV2GrpcAsyncIOTransport +_transport_registry["grpc"] = MetricsServiceV2GrpcTransport +_transport_registry["grpc_asyncio"] = MetricsServiceV2GrpcAsyncIOTransport __all__ = ( - 'MetricsServiceV2Transport', - 'MetricsServiceV2GrpcTransport', - 'MetricsServiceV2GrpcAsyncIOTransport', + "MetricsServiceV2Transport", + "MetricsServiceV2GrpcTransport", + "MetricsServiceV2GrpcAsyncIOTransport", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index f919287841e7..f9198cdeccd9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -16,22 +16,22 @@ import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -from google.cloud.logging_v2 import gapic_version as package_version - -import google.auth # type: ignore import google.api_core +import google.auth # type: ignore +import google.protobuf +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - +from google.cloud.logging_v2 import gapic_version as package_version from google.cloud.logging_v2.types import logging_metrics -from 
google.longrunning import operations_pb2 # type: ignore -import google.protobuf.empty_pb2 as empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ @@ -41,27 +41,28 @@ class MetricsServiceV2Transport(abc.ABC): """Abstract transport class for MetricsServiceV2.""" AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", ) - DEFAULT_HOST: str = 'logging.googleapis.com' + DEFAULT_HOST: str = "logging.googleapis.com" def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = 
None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: """Instantiate the transport. Args: @@ -100,31 +101,43 @@ def __init__( # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) if credentials_file is not None: credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - default_scopes=self.AUTH_SCOPES, - ) + credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(scopes=scopes, quota_project_id=quota_project_id, default_scopes=self.AUTH_SCOPES) + credentials, _ = google.auth.default( + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) # Don't apply audience if the credentials file passed from user. if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. 
- if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): credentials = credentials.with_always_use_jwt_access(True) # Save the credentials. self._credentials = credentials # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' + if ":" not in host: + host += ":443" self._host = host self._wrapped_methods: Dict[Callable, Callable] = {} @@ -220,60 +233,63 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - } + } def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
""" raise NotImplementedError() @property - def list_log_metrics(self) -> Callable[ - [logging_metrics.ListLogMetricsRequest], - Union[ - logging_metrics.ListLogMetricsResponse, - Awaitable[logging_metrics.ListLogMetricsResponse] - ]]: + def list_log_metrics( + self, + ) -> Callable[ + [logging_metrics.ListLogMetricsRequest], + Union[ + logging_metrics.ListLogMetricsResponse, + Awaitable[logging_metrics.ListLogMetricsResponse], + ], + ]: raise NotImplementedError() @property - def get_log_metric(self) -> Callable[ - [logging_metrics.GetLogMetricRequest], - Union[ - logging_metrics.LogMetric, - Awaitable[logging_metrics.LogMetric] - ]]: + def get_log_metric( + self, + ) -> Callable[ + [logging_metrics.GetLogMetricRequest], + Union[logging_metrics.LogMetric, Awaitable[logging_metrics.LogMetric]], + ]: raise NotImplementedError() @property - def create_log_metric(self) -> Callable[ - [logging_metrics.CreateLogMetricRequest], - Union[ - logging_metrics.LogMetric, - Awaitable[logging_metrics.LogMetric] - ]]: + def create_log_metric( + self, + ) -> Callable[ + [logging_metrics.CreateLogMetricRequest], + Union[logging_metrics.LogMetric, Awaitable[logging_metrics.LogMetric]], + ]: raise NotImplementedError() @property - def update_log_metric(self) -> Callable[ - [logging_metrics.UpdateLogMetricRequest], - Union[ - logging_metrics.LogMetric, - Awaitable[logging_metrics.LogMetric] - ]]: + def update_log_metric( + self, + ) -> Callable[ + [logging_metrics.UpdateLogMetricRequest], + Union[logging_metrics.LogMetric, Awaitable[logging_metrics.LogMetric]], + ]: raise NotImplementedError() @property - def delete_log_metric(self) -> Callable[ - [logging_metrics.DeleteLogMetricRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def delete_log_metric( + self, + ) -> Callable[ + [logging_metrics.DeleteLogMetricRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: raise NotImplementedError() @property @@ -281,7 +297,10 @@ def list_operations( self, 
) -> Callable[ [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], ]: raise NotImplementedError() @@ -308,6 +327,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ( - 'MetricsServiceV2Transport', -) +__all__ = ("MetricsServiceV2Transport",) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 0594c56f0aee..a1059882a3d6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -19,24 +19,23 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson +import google.auth # type: ignore +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore import google.protobuf.message - import grpc # type: ignore import proto # type: ignore - +from google.api_core import gapic_v1, grpc_helpers +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore from google.cloud.logging_v2.types import logging_metrics -from google.longrunning import operations_pb2 # type: ignore -import 
google.protobuf.empty_pb2 as empty_pb2 # type: ignore -from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson + +from .base import DEFAULT_CLIENT_INFO, MetricsServiceV2Transport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -46,7 +45,9 @@ class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) if logging_enabled: # pragma: NO COVER request_metadata = client_call_details.metadata if isinstance(request, proto.Message): @@ -67,7 +68,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.logging.v2.MetricsServiceV2", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -78,7 +79,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) result = response.result() if isinstance(result, proto.Message): response_payload = type(result).to_json(result) @@ -93,7 +98,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Received response for 
{client_call_details.method}.", - extra = { + extra={ "serviceName": "google.logging.v2.MetricsServiceV2", "rpcName": client_call_details.method, "response": grpc_response, @@ -115,23 +120,26 @@ class MetricsServiceV2GrpcTransport(MetricsServiceV2Transport): It sends protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. """ + _stubs: Dict[str, Callable] - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. 
Args: @@ -258,19 +266,23 @@ def __init__(self, *, ) self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod - def create_channel(cls, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: + def create_channel( + cls, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: """Create and return a gRPC channel object. Args: host (Optional[str]): The host for the channel to use. @@ -306,19 +318,20 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property - def list_log_metrics(self) -> Callable[ - [logging_metrics.ListLogMetricsRequest], - logging_metrics.ListLogMetricsResponse]: + def list_log_metrics( + self, + ) -> Callable[ + [logging_metrics.ListLogMetricsRequest], logging_metrics.ListLogMetricsResponse + ]: r"""Return a callable for the list log metrics method over gRPC. Lists logs-based metrics. @@ -333,18 +346,18 @@ def list_log_metrics(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_log_metrics' not in self._stubs: - self._stubs['list_log_metrics'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/ListLogMetrics', + if "list_log_metrics" not in self._stubs: + self._stubs["list_log_metrics"] = self._logged_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/ListLogMetrics", request_serializer=logging_metrics.ListLogMetricsRequest.serialize, response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize, ) - return self._stubs['list_log_metrics'] + return self._stubs["list_log_metrics"] @property - def get_log_metric(self) -> Callable[ - [logging_metrics.GetLogMetricRequest], - logging_metrics.LogMetric]: + def get_log_metric( + self, + ) -> Callable[[logging_metrics.GetLogMetricRequest], logging_metrics.LogMetric]: r"""Return a callable for the get log metric method over gRPC. Gets a logs-based metric. @@ -359,18 +372,18 @@ def get_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_log_metric' not in self._stubs: - self._stubs['get_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/GetLogMetric', + if "get_log_metric" not in self._stubs: + self._stubs["get_log_metric"] = self._logged_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/GetLogMetric", request_serializer=logging_metrics.GetLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, ) - return self._stubs['get_log_metric'] + return self._stubs["get_log_metric"] @property - def create_log_metric(self) -> Callable[ - [logging_metrics.CreateLogMetricRequest], - logging_metrics.LogMetric]: + def create_log_metric( + self, + ) -> Callable[[logging_metrics.CreateLogMetricRequest], logging_metrics.LogMetric]: r"""Return a callable for the create log metric method over gRPC. Creates a logs-based metric. 
@@ -385,18 +398,18 @@ def create_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_log_metric' not in self._stubs: - self._stubs['create_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/CreateLogMetric', + if "create_log_metric" not in self._stubs: + self._stubs["create_log_metric"] = self._logged_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/CreateLogMetric", request_serializer=logging_metrics.CreateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, ) - return self._stubs['create_log_metric'] + return self._stubs["create_log_metric"] @property - def update_log_metric(self) -> Callable[ - [logging_metrics.UpdateLogMetricRequest], - logging_metrics.LogMetric]: + def update_log_metric( + self, + ) -> Callable[[logging_metrics.UpdateLogMetricRequest], logging_metrics.LogMetric]: r"""Return a callable for the update log metric method over gRPC. Creates or updates a logs-based metric. @@ -411,18 +424,18 @@ def update_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'update_log_metric' not in self._stubs: - self._stubs['update_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/UpdateLogMetric', + if "update_log_metric" not in self._stubs: + self._stubs["update_log_metric"] = self._logged_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/UpdateLogMetric", request_serializer=logging_metrics.UpdateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, ) - return self._stubs['update_log_metric'] + return self._stubs["update_log_metric"] @property - def delete_log_metric(self) -> Callable[ - [logging_metrics.DeleteLogMetricRequest], - empty_pb2.Empty]: + def delete_log_metric( + self, + ) -> Callable[[logging_metrics.DeleteLogMetricRequest], empty_pb2.Empty]: r"""Return a callable for the delete log metric method over gRPC. Deletes a logs-based metric. @@ -437,13 +450,13 @@ def delete_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_log_metric' not in self._stubs: - self._stubs['delete_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/DeleteLogMetric', + if "delete_log_metric" not in self._stubs: + self._stubs["delete_log_metric"] = self._logged_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/DeleteLogMetric", request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_log_metric'] + return self._stubs["delete_log_metric"] def close(self): self._logged_channel.close() @@ -452,8 +465,7 @@ def close(self): def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. 
- """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -470,8 +482,7 @@ def cancel_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -487,9 +498,10 @@ def get_operation( @property def list_operations( self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
# gRPC handles serialization and deserialization, so we just need @@ -507,6 +519,4 @@ def kind(self) -> str: return "grpc" -__all__ = ( - 'MetricsServiceV2GrpcTransport', -) +__all__ = ("MetricsServiceV2GrpcTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index a87425f25f28..7963363d29fa 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -15,32 +15,31 @@ # import inspect import json -import pickle import logging as std_logging +import pickle import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore from grpc.experimental import aio # type: 
ignore -from google.cloud.logging_v2.types import logging_metrics -from google.longrunning import operations_pb2 # type: ignore -import google.protobuf.empty_pb2 as empty_pb2 # type: ignore -from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO +from .base import DEFAULT_CLIENT_INFO, MetricsServiceV2Transport from .grpc import MetricsServiceV2GrpcTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -48,9 +47,13 @@ _LOGGER = std_logging.getLogger(__name__) -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) if logging_enabled: # pragma: NO COVER request_metadata = client_call_details.metadata if isinstance(request, proto.Message): @@ -71,7 +74,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.logging.v2.MetricsServiceV2", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -82,7 +85,11 @@ async def intercept_unary_unary(self, continuation, client_call_details, request if logging_enabled: # pragma: NO COVER response_metadata = await response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) result = await response if 
isinstance(result, proto.Message): response_payload = type(result).to_json(result) @@ -97,7 +104,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Received response to rpc {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.logging.v2.MetricsServiceV2", "rpcName": str(client_call_details.method), "response": grpc_response, @@ -124,13 +131,15 @@ class MetricsServiceV2GrpcAsyncIOTransport(MetricsServiceV2Transport): _stubs: Dict[str, Callable] = {} @classmethod - def create_channel(cls, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: + def create_channel( + cls, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: host (Optional[str]): The host for the channel to use. 
@@ -161,24 +170,26 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. 
Args: @@ -307,7 +318,9 @@ def __init__(self, *, self._interceptor = _LoggingClientAIOInterceptor() self._grpc_channel._unary_unary_interceptors.append(self._interceptor) self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @@ -322,9 +335,12 @@ def grpc_channel(self) -> aio.Channel: return self._grpc_channel @property - def list_log_metrics(self) -> Callable[ - [logging_metrics.ListLogMetricsRequest], - Awaitable[logging_metrics.ListLogMetricsResponse]]: + def list_log_metrics( + self, + ) -> Callable[ + [logging_metrics.ListLogMetricsRequest], + Awaitable[logging_metrics.ListLogMetricsResponse], + ]: r"""Return a callable for the list log metrics method over gRPC. Lists logs-based metrics. @@ -339,18 +355,20 @@ def list_log_metrics(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_log_metrics' not in self._stubs: - self._stubs['list_log_metrics'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/ListLogMetrics', + if "list_log_metrics" not in self._stubs: + self._stubs["list_log_metrics"] = self._logged_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/ListLogMetrics", request_serializer=logging_metrics.ListLogMetricsRequest.serialize, response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize, ) - return self._stubs['list_log_metrics'] + return self._stubs["list_log_metrics"] @property - def get_log_metric(self) -> Callable[ - [logging_metrics.GetLogMetricRequest], - Awaitable[logging_metrics.LogMetric]]: + def get_log_metric( + self, + ) -> Callable[ + [logging_metrics.GetLogMetricRequest], Awaitable[logging_metrics.LogMetric] + ]: r"""Return a callable for the get log metric method over gRPC. Gets a logs-based metric. @@ -365,18 +383,20 @@ def get_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_log_metric' not in self._stubs: - self._stubs['get_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/GetLogMetric', + if "get_log_metric" not in self._stubs: + self._stubs["get_log_metric"] = self._logged_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/GetLogMetric", request_serializer=logging_metrics.GetLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, ) - return self._stubs['get_log_metric'] + return self._stubs["get_log_metric"] @property - def create_log_metric(self) -> Callable[ - [logging_metrics.CreateLogMetricRequest], - Awaitable[logging_metrics.LogMetric]]: + def create_log_metric( + self, + ) -> Callable[ + [logging_metrics.CreateLogMetricRequest], Awaitable[logging_metrics.LogMetric] + ]: r"""Return a callable for the create log metric method over gRPC. Creates a logs-based metric. 
@@ -391,18 +411,20 @@ def create_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_log_metric' not in self._stubs: - self._stubs['create_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/CreateLogMetric', + if "create_log_metric" not in self._stubs: + self._stubs["create_log_metric"] = self._logged_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/CreateLogMetric", request_serializer=logging_metrics.CreateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, ) - return self._stubs['create_log_metric'] + return self._stubs["create_log_metric"] @property - def update_log_metric(self) -> Callable[ - [logging_metrics.UpdateLogMetricRequest], - Awaitable[logging_metrics.LogMetric]]: + def update_log_metric( + self, + ) -> Callable[ + [logging_metrics.UpdateLogMetricRequest], Awaitable[logging_metrics.LogMetric] + ]: r"""Return a callable for the update log metric method over gRPC. Creates or updates a logs-based metric. @@ -417,18 +439,18 @@ def update_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'update_log_metric' not in self._stubs: - self._stubs['update_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/UpdateLogMetric', + if "update_log_metric" not in self._stubs: + self._stubs["update_log_metric"] = self._logged_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/UpdateLogMetric", request_serializer=logging_metrics.UpdateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, ) - return self._stubs['update_log_metric'] + return self._stubs["update_log_metric"] @property - def delete_log_metric(self) -> Callable[ - [logging_metrics.DeleteLogMetricRequest], - Awaitable[empty_pb2.Empty]]: + def delete_log_metric( + self, + ) -> Callable[[logging_metrics.DeleteLogMetricRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete log metric method over gRPC. Deletes a logs-based metric. @@ -443,16 +465,16 @@ def delete_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'delete_log_metric' not in self._stubs: - self._stubs['delete_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/DeleteLogMetric', + if "delete_log_metric" not in self._stubs: + self._stubs["delete_log_metric"] = self._logged_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/DeleteLogMetric", request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_log_metric'] + return self._stubs["delete_log_metric"] def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { self.list_log_metrics: self._wrap_method( self.list_log_metrics, @@ -556,8 +578,7 @@ def kind(self) -> str: def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -574,8 +595,7 @@ def cancel_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -591,9 +611,10 @@ def get_operation( @property def list_operations( self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. 
- """ + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -607,6 +628,4 @@ def list_operations( return self._stubs["list_operations"] -__all__ = ( - 'MetricsServiceV2GrpcAsyncIOTransport', -) +__all__ = ("MetricsServiceV2GrpcAsyncIOTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/__init__.py index 3023b14aa8ae..8519243a0d2a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/__init__.py @@ -59,6 +59,8 @@ GetSinkRequest, GetViewRequest, IndexConfig, + IndexType, + LifecycleState, Link, LinkMetadata, ListBucketsRequest, @@ -76,6 +78,7 @@ LogExclusion, LogSink, LogView, + OperationState, Settings, UndeleteBucketRequest, UpdateBucketRequest, @@ -84,9 +87,6 @@ UpdateSettingsRequest, UpdateSinkRequest, UpdateViewRequest, - IndexType, - LifecycleState, - OperationState, ) from .logging_metrics import ( CreateLogMetricRequest, @@ -99,80 +99,80 @@ ) __all__ = ( - 'LogEntry', - 'LogEntryOperation', - 'LogEntrySourceLocation', - 'LogSplit', - 'DeleteLogRequest', - 'ListLogEntriesRequest', - 'ListLogEntriesResponse', - 'ListLogsRequest', - 'ListLogsResponse', - 'ListMonitoredResourceDescriptorsRequest', - 'ListMonitoredResourceDescriptorsResponse', - 'TailLogEntriesRequest', - 'TailLogEntriesResponse', - 'WriteLogEntriesPartialErrors', - 'WriteLogEntriesRequest', - 'WriteLogEntriesResponse', - 'BigQueryDataset', - 'BigQueryOptions', - 'BucketMetadata', - 
'CmekSettings', - 'CopyLogEntriesMetadata', - 'CopyLogEntriesRequest', - 'CopyLogEntriesResponse', - 'CreateBucketRequest', - 'CreateExclusionRequest', - 'CreateLinkRequest', - 'CreateSinkRequest', - 'CreateViewRequest', - 'DeleteBucketRequest', - 'DeleteExclusionRequest', - 'DeleteLinkRequest', - 'DeleteSinkRequest', - 'DeleteViewRequest', - 'GetBucketRequest', - 'GetCmekSettingsRequest', - 'GetExclusionRequest', - 'GetLinkRequest', - 'GetSettingsRequest', - 'GetSinkRequest', - 'GetViewRequest', - 'IndexConfig', - 'Link', - 'LinkMetadata', - 'ListBucketsRequest', - 'ListBucketsResponse', - 'ListExclusionsRequest', - 'ListExclusionsResponse', - 'ListLinksRequest', - 'ListLinksResponse', - 'ListSinksRequest', - 'ListSinksResponse', - 'ListViewsRequest', - 'ListViewsResponse', - 'LocationMetadata', - 'LogBucket', - 'LogExclusion', - 'LogSink', - 'LogView', - 'Settings', - 'UndeleteBucketRequest', - 'UpdateBucketRequest', - 'UpdateCmekSettingsRequest', - 'UpdateExclusionRequest', - 'UpdateSettingsRequest', - 'UpdateSinkRequest', - 'UpdateViewRequest', - 'IndexType', - 'LifecycleState', - 'OperationState', - 'CreateLogMetricRequest', - 'DeleteLogMetricRequest', - 'GetLogMetricRequest', - 'ListLogMetricsRequest', - 'ListLogMetricsResponse', - 'LogMetric', - 'UpdateLogMetricRequest', + "LogEntry", + "LogEntryOperation", + "LogEntrySourceLocation", + "LogSplit", + "DeleteLogRequest", + "ListLogEntriesRequest", + "ListLogEntriesResponse", + "ListLogsRequest", + "ListLogsResponse", + "ListMonitoredResourceDescriptorsRequest", + "ListMonitoredResourceDescriptorsResponse", + "TailLogEntriesRequest", + "TailLogEntriesResponse", + "WriteLogEntriesPartialErrors", + "WriteLogEntriesRequest", + "WriteLogEntriesResponse", + "BigQueryDataset", + "BigQueryOptions", + "BucketMetadata", + "CmekSettings", + "CopyLogEntriesMetadata", + "CopyLogEntriesRequest", + "CopyLogEntriesResponse", + "CreateBucketRequest", + "CreateExclusionRequest", + "CreateLinkRequest", + "CreateSinkRequest", + 
"CreateViewRequest", + "DeleteBucketRequest", + "DeleteExclusionRequest", + "DeleteLinkRequest", + "DeleteSinkRequest", + "DeleteViewRequest", + "GetBucketRequest", + "GetCmekSettingsRequest", + "GetExclusionRequest", + "GetLinkRequest", + "GetSettingsRequest", + "GetSinkRequest", + "GetViewRequest", + "IndexConfig", + "Link", + "LinkMetadata", + "ListBucketsRequest", + "ListBucketsResponse", + "ListExclusionsRequest", + "ListExclusionsResponse", + "ListLinksRequest", + "ListLinksResponse", + "ListSinksRequest", + "ListSinksResponse", + "ListViewsRequest", + "ListViewsResponse", + "LocationMetadata", + "LogBucket", + "LogExclusion", + "LogSink", + "LogView", + "Settings", + "UndeleteBucketRequest", + "UpdateBucketRequest", + "UpdateCmekSettingsRequest", + "UpdateExclusionRequest", + "UpdateSettingsRequest", + "UpdateSinkRequest", + "UpdateViewRequest", + "IndexType", + "LifecycleState", + "OperationState", + "CreateLogMetricRequest", + "DeleteLogMetricRequest", + "GetLogMetricRequest", + "ListLogMetricsRequest", + "ListLogMetricsResponse", + "LogMetric", + "UpdateLogMetricRequest", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/log_entry.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/log_entry.py index 106a4c850dad..b381dd1eb9a7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/log_entry.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/log_entry.py @@ -17,23 +17,21 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - import google.api.monitored_resource_pb2 as monitored_resource_pb2 # type: ignore import google.logging.type.http_request_pb2 as http_request_pb2 # type: ignore import google.logging.type.log_severity_pb2 as log_severity_pb2 # type: ignore import google.protobuf.any_pb2 as any_pb2 # 
type: ignore import google.protobuf.struct_pb2 as struct_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore - +import proto # type: ignore __protobuf__ = proto.module( - package='google.logging.v2', + package="google.logging.v2", manifest={ - 'LogEntry', - 'LogEntryOperation', - 'LogEntrySourceLocation', - 'LogSplit', + "LogEntry", + "LogEntryOperation", + "LogEntrySourceLocation", + "LogSplit", }, ) @@ -249,18 +247,18 @@ class LogEntry(proto.Message): proto_payload: any_pb2.Any = proto.Field( proto.MESSAGE, number=2, - oneof='payload', + oneof="payload", message=any_pb2.Any, ) text_payload: str = proto.Field( proto.STRING, number=3, - oneof='payload', + oneof="payload", ) json_payload: struct_pb2.Struct = proto.Field( proto.MESSAGE, number=6, - oneof='payload', + oneof="payload", message=struct_pb2.Struct, ) timestamp: timestamp_pb2.Timestamp = proto.Field( @@ -292,10 +290,10 @@ class LogEntry(proto.Message): proto.STRING, number=11, ) - operation: 'LogEntryOperation' = proto.Field( + operation: "LogEntryOperation" = proto.Field( proto.MESSAGE, number=15, - message='LogEntryOperation', + message="LogEntryOperation", ) trace: str = proto.Field( proto.STRING, @@ -309,15 +307,15 @@ class LogEntry(proto.Message): proto.BOOL, number=30, ) - source_location: 'LogEntrySourceLocation' = proto.Field( + source_location: "LogEntrySourceLocation" = proto.Field( proto.MESSAGE, number=23, - message='LogEntrySourceLocation', + message="LogEntrySourceLocation", ) - split: 'LogSplit' = proto.Field( + split: "LogSplit" = proto.Field( proto.MESSAGE, number=35, - message='LogSplit', + message="LogSplit", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging.py index 58b6168d7c26..9269d54e0d98 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging.py @@ -17,29 +17,27 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - -from google.cloud.logging_v2.types import log_entry import google.api.monitored_resource_pb2 as monitored_resource_pb2 # type: ignore import google.protobuf.duration_pb2 as duration_pb2 # type: ignore import google.rpc.status_pb2 as status_pb2 # type: ignore - +import proto # type: ignore +from google.cloud.logging_v2.types import log_entry __protobuf__ = proto.module( - package='google.logging.v2', + package="google.logging.v2", manifest={ - 'DeleteLogRequest', - 'WriteLogEntriesRequest', - 'WriteLogEntriesResponse', - 'WriteLogEntriesPartialErrors', - 'ListLogEntriesRequest', - 'ListLogEntriesResponse', - 'ListMonitoredResourceDescriptorsRequest', - 'ListMonitoredResourceDescriptorsResponse', - 'ListLogsRequest', - 'ListLogsResponse', - 'TailLogEntriesRequest', - 'TailLogEntriesResponse', + "DeleteLogRequest", + "WriteLogEntriesRequest", + "WriteLogEntriesResponse", + "WriteLogEntriesPartialErrors", + "ListLogEntriesRequest", + "ListLogEntriesResponse", + "ListMonitoredResourceDescriptorsRequest", + "ListMonitoredResourceDescriptorsResponse", + "ListLogsRequest", + "ListLogsResponse", + "TailLogEntriesRequest", + "TailLogEntriesResponse", }, ) @@ -191,8 +189,7 @@ class WriteLogEntriesRequest(proto.Message): class WriteLogEntriesResponse(proto.Message): - r"""Result returned from WriteLogEntries. 
- """ + r"""Result returned from WriteLogEntries.""" class WriteLogEntriesPartialErrors(proto.Message): @@ -376,7 +373,9 @@ class ListMonitoredResourceDescriptorsResponse(proto.Message): def raw_page(self): return self - resource_descriptors: MutableSequence[monitored_resource_pb2.MonitoredResourceDescriptor] = proto.RepeatedField( + resource_descriptors: MutableSequence[ + monitored_resource_pb2.MonitoredResourceDescriptor + ] = proto.RepeatedField( proto.MESSAGE, number=1, message=monitored_resource_pb2.MonitoredResourceDescriptor, @@ -556,6 +555,7 @@ class SuppressionInfo(proto.Message): A lower bound on the count of entries omitted due to ``reason``. """ + class Reason(proto.Enum): r"""An indicator of why entries were omitted. @@ -571,14 +571,15 @@ class Reason(proto.Enum): Indicates suppression occurred due to the client not consuming responses quickly enough. """ + REASON_UNSPECIFIED = 0 RATE_LIMIT = 1 NOT_CONSUMED = 2 - reason: 'TailLogEntriesResponse.SuppressionInfo.Reason' = proto.Field( + reason: "TailLogEntriesResponse.SuppressionInfo.Reason" = proto.Field( proto.ENUM, number=1, - enum='TailLogEntriesResponse.SuppressionInfo.Reason', + enum="TailLogEntriesResponse.SuppressionInfo.Reason", ) suppressed_count: int = proto.Field( proto.INT32, diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_config.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_config.py index 50c894e3883d..78792702a5dd 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_config.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_config.py @@ -17,68 +17,66 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore import 
google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore - +import proto # type: ignore __protobuf__ = proto.module( - package='google.logging.v2', + package="google.logging.v2", manifest={ - 'OperationState', - 'LifecycleState', - 'IndexType', - 'IndexConfig', - 'LogBucket', - 'LogView', - 'LogSink', - 'BigQueryDataset', - 'Link', - 'BigQueryOptions', - 'ListBucketsRequest', - 'ListBucketsResponse', - 'CreateBucketRequest', - 'UpdateBucketRequest', - 'GetBucketRequest', - 'DeleteBucketRequest', - 'UndeleteBucketRequest', - 'ListViewsRequest', - 'ListViewsResponse', - 'CreateViewRequest', - 'UpdateViewRequest', - 'GetViewRequest', - 'DeleteViewRequest', - 'ListSinksRequest', - 'ListSinksResponse', - 'GetSinkRequest', - 'CreateSinkRequest', - 'UpdateSinkRequest', - 'DeleteSinkRequest', - 'CreateLinkRequest', - 'DeleteLinkRequest', - 'ListLinksRequest', - 'ListLinksResponse', - 'GetLinkRequest', - 'LogExclusion', - 'ListExclusionsRequest', - 'ListExclusionsResponse', - 'GetExclusionRequest', - 'CreateExclusionRequest', - 'UpdateExclusionRequest', - 'DeleteExclusionRequest', - 'GetCmekSettingsRequest', - 'UpdateCmekSettingsRequest', - 'CmekSettings', - 'GetSettingsRequest', - 'UpdateSettingsRequest', - 'Settings', - 'CopyLogEntriesRequest', - 'CopyLogEntriesMetadata', - 'CopyLogEntriesResponse', - 'BucketMetadata', - 'LinkMetadata', - 'LocationMetadata', + "OperationState", + "LifecycleState", + "IndexType", + "IndexConfig", + "LogBucket", + "LogView", + "LogSink", + "BigQueryDataset", + "Link", + "BigQueryOptions", + "ListBucketsRequest", + "ListBucketsResponse", + "CreateBucketRequest", + "UpdateBucketRequest", + "GetBucketRequest", + "DeleteBucketRequest", + "UndeleteBucketRequest", + "ListViewsRequest", + "ListViewsResponse", + "CreateViewRequest", + "UpdateViewRequest", + "GetViewRequest", + "DeleteViewRequest", + "ListSinksRequest", + "ListSinksResponse", + "GetSinkRequest", + "CreateSinkRequest", + "UpdateSinkRequest", + "DeleteSinkRequest", + 
"CreateLinkRequest", + "DeleteLinkRequest", + "ListLinksRequest", + "ListLinksResponse", + "GetLinkRequest", + "LogExclusion", + "ListExclusionsRequest", + "ListExclusionsResponse", + "GetExclusionRequest", + "CreateExclusionRequest", + "UpdateExclusionRequest", + "DeleteExclusionRequest", + "GetCmekSettingsRequest", + "UpdateCmekSettingsRequest", + "CmekSettings", + "GetSettingsRequest", + "UpdateSettingsRequest", + "Settings", + "CopyLogEntriesRequest", + "CopyLogEntriesMetadata", + "CopyLogEntriesResponse", + "BucketMetadata", + "LinkMetadata", + "LocationMetadata", }, ) @@ -107,6 +105,7 @@ class OperationState(proto.Enum): OPERATION_STATE_CANCELLED (6): The operation was cancelled by the user. """ + OPERATION_STATE_UNSPECIFIED = 0 OPERATION_STATE_SCHEDULED = 1 OPERATION_STATE_WAITING_FOR_PERMISSIONS = 2 @@ -140,6 +139,7 @@ class LifecycleState(proto.Enum): FAILED (5): The resource is in an INTERNAL error state. """ + LIFECYCLE_STATE_UNSPECIFIED = 0 ACTIVE = 1 DELETE_REQUESTED = 2 @@ -160,6 +160,7 @@ class IndexType(proto.Enum): INDEX_TYPE_INTEGER (2): The index is a integer-type index. 
""" + INDEX_TYPE_UNSPECIFIED = 0 INDEX_TYPE_STRING = 1 INDEX_TYPE_INTEGER = 2 @@ -191,10 +192,10 @@ class IndexConfig(proto.Message): proto.STRING, number=1, ) - type_: 'IndexType' = proto.Field( + type_: "IndexType" = proto.Field( proto.ENUM, number=2, - enum='IndexType', + enum="IndexType", ) create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, @@ -300,10 +301,10 @@ class LogBucket(proto.Message): proto.BOOL, number=9, ) - lifecycle_state: 'LifecycleState' = proto.Field( + lifecycle_state: "LifecycleState" = proto.Field( proto.ENUM, number=12, - enum='LifecycleState', + enum="LifecycleState", ) analytics_enabled: bool = proto.Field( proto.BOOL, @@ -313,15 +314,15 @@ class LogBucket(proto.Message): proto.STRING, number=15, ) - index_configs: MutableSequence['IndexConfig'] = proto.RepeatedField( + index_configs: MutableSequence["IndexConfig"] = proto.RepeatedField( proto.MESSAGE, number=17, - message='IndexConfig', + message="IndexConfig", ) - cmek_settings: 'CmekSettings' = proto.Field( + cmek_settings: "CmekSettings" = proto.Field( proto.MESSAGE, number=19, - message='CmekSettings', + message="CmekSettings", ) @@ -500,6 +501,7 @@ class LogSink(proto.Message): sink. This field may not be present for older sinks. """ + class VersionFormat(proto.Enum): r"""Deprecated. This is unused. @@ -512,6 +514,7 @@ class VersionFormat(proto.Enum): V1 (2): ``LogEntry`` version 1 format. 
""" + VERSION_FORMAT_UNSPECIFIED = 0 V2 = 1 V1 = 2 @@ -536,10 +539,10 @@ class VersionFormat(proto.Enum): proto.BOOL, number=19, ) - exclusions: MutableSequence['LogExclusion'] = proto.RepeatedField( + exclusions: MutableSequence["LogExclusion"] = proto.RepeatedField( proto.MESSAGE, number=16, - message='LogExclusion', + message="LogExclusion", ) output_version_format: VersionFormat = proto.Field( proto.ENUM, @@ -554,11 +557,11 @@ class VersionFormat(proto.Enum): proto.BOOL, number=9, ) - bigquery_options: 'BigQueryOptions' = proto.Field( + bigquery_options: "BigQueryOptions" = proto.Field( proto.MESSAGE, number=12, - oneof='options', - message='BigQueryOptions', + oneof="options", + message="BigQueryOptions", ) create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, @@ -644,15 +647,15 @@ class Link(proto.Message): number=3, message=timestamp_pb2.Timestamp, ) - lifecycle_state: 'LifecycleState' = proto.Field( + lifecycle_state: "LifecycleState" = proto.Field( proto.ENUM, number=4, - enum='LifecycleState', + enum="LifecycleState", ) - bigquery_dataset: 'BigQueryDataset' = proto.Field( + bigquery_dataset: "BigQueryDataset" = proto.Field( proto.MESSAGE, number=5, - message='BigQueryDataset', + message="BigQueryDataset", ) @@ -755,10 +758,10 @@ class ListBucketsResponse(proto.Message): def raw_page(self): return self - buckets: MutableSequence['LogBucket'] = proto.RepeatedField( + buckets: MutableSequence["LogBucket"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='LogBucket', + message="LogBucket", ) next_page_token: str = proto.Field( proto.STRING, @@ -800,10 +803,10 @@ class CreateBucketRequest(proto.Message): proto.STRING, number=2, ) - bucket: 'LogBucket' = proto.Field( + bucket: "LogBucket" = proto.Field( proto.MESSAGE, number=3, - message='LogBucket', + message="LogBucket", ) @@ -842,10 +845,10 @@ class UpdateBucketRequest(proto.Message): proto.STRING, number=1, ) - bucket: 'LogBucket' = proto.Field( + bucket: "LogBucket" = proto.Field( 
proto.MESSAGE, number=2, - message='LogBucket', + message="LogBucket", ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -985,10 +988,10 @@ class ListViewsResponse(proto.Message): def raw_page(self): return self - views: MutableSequence['LogView'] = proto.RepeatedField( + views: MutableSequence["LogView"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='LogView', + message="LogView", ) next_page_token: str = proto.Field( proto.STRING, @@ -1027,10 +1030,10 @@ class CreateViewRequest(proto.Message): proto.STRING, number=2, ) - view: 'LogView' = proto.Field( + view: "LogView" = proto.Field( proto.MESSAGE, number=3, - message='LogView', + message="LogView", ) @@ -1066,10 +1069,10 @@ class UpdateViewRequest(proto.Message): proto.STRING, number=1, ) - view: 'LogView' = proto.Field( + view: "LogView" = proto.Field( proto.MESSAGE, number=2, - message='LogView', + message="LogView", ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -1181,10 +1184,10 @@ class ListSinksResponse(proto.Message): def raw_page(self): return self - sinks: MutableSequence['LogSink'] = proto.RepeatedField( + sinks: MutableSequence["LogSink"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='LogSink', + message="LogSink", ) next_page_token: str = proto.Field( proto.STRING, @@ -1259,10 +1262,10 @@ class CreateSinkRequest(proto.Message): proto.STRING, number=1, ) - sink: 'LogSink' = proto.Field( + sink: "LogSink" = proto.Field( proto.MESSAGE, number=2, - message='LogSink', + message="LogSink", ) unique_writer_identity: bool = proto.Field( proto.BOOL, @@ -1331,10 +1334,10 @@ class UpdateSinkRequest(proto.Message): proto.STRING, number=1, ) - sink: 'LogSink' = proto.Field( + sink: "LogSink" = proto.Field( proto.MESSAGE, number=2, - message='LogSink', + message="LogSink", ) unique_writer_identity: bool = proto.Field( proto.BOOL, @@ -1399,10 +1402,10 @@ class CreateLinkRequest(proto.Message): proto.STRING, number=1, ) - link: 'Link' = 
proto.Field( + link: "Link" = proto.Field( proto.MESSAGE, number=2, - message='Link', + message="Link", ) link_id: str = proto.Field( proto.STRING, @@ -1481,10 +1484,10 @@ class ListLinksResponse(proto.Message): def raw_page(self): return self - links: MutableSequence['Link'] = proto.RepeatedField( + links: MutableSequence["Link"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='Link', + message="Link", ) next_page_token: str = proto.Field( proto.STRING, @@ -1643,10 +1646,10 @@ class ListExclusionsResponse(proto.Message): def raw_page(self): return self - exclusions: MutableSequence['LogExclusion'] = proto.RepeatedField( + exclusions: MutableSequence["LogExclusion"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='LogExclusion', + message="LogExclusion", ) next_page_token: str = proto.Field( proto.STRING, @@ -1708,10 +1711,10 @@ class CreateExclusionRequest(proto.Message): proto.STRING, number=1, ) - exclusion: 'LogExclusion' = proto.Field( + exclusion: "LogExclusion" = proto.Field( proto.MESSAGE, number=2, - message='LogExclusion', + message="LogExclusion", ) @@ -1752,10 +1755,10 @@ class UpdateExclusionRequest(proto.Message): proto.STRING, number=1, ) - exclusion: 'LogExclusion' = proto.Field( + exclusion: "LogExclusion" = proto.Field( proto.MESSAGE, number=2, - message='LogExclusion', + message="LogExclusion", ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -1874,10 +1877,10 @@ class UpdateCmekSettingsRequest(proto.Message): proto.STRING, number=1, ) - cmek_settings: 'CmekSettings' = proto.Field( + cmek_settings: "CmekSettings" = proto.Field( proto.MESSAGE, number=2, - message='CmekSettings', + message="CmekSettings", ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -2073,10 +2076,10 @@ class UpdateSettingsRequest(proto.Message): proto.STRING, number=1, ) - settings: 'Settings' = proto.Field( + settings: "Settings" = proto.Field( proto.MESSAGE, number=2, - message='Settings', + 
message="Settings", ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -2249,19 +2252,19 @@ class CopyLogEntriesMetadata(proto.Message): number=2, message=timestamp_pb2.Timestamp, ) - state: 'OperationState' = proto.Field( + state: "OperationState" = proto.Field( proto.ENUM, number=3, - enum='OperationState', + enum="OperationState", ) cancellation_requested: bool = proto.Field( proto.BOOL, number=4, ) - request: 'CopyLogEntriesRequest' = proto.Field( + request: "CopyLogEntriesRequest" = proto.Field( proto.MESSAGE, number=5, - message='CopyLogEntriesRequest', + message="CopyLogEntriesRequest", ) progress: int = proto.Field( proto.INT32, @@ -2324,22 +2327,22 @@ class BucketMetadata(proto.Message): number=2, message=timestamp_pb2.Timestamp, ) - state: 'OperationState' = proto.Field( + state: "OperationState" = proto.Field( proto.ENUM, number=3, - enum='OperationState', + enum="OperationState", ) - create_bucket_request: 'CreateBucketRequest' = proto.Field( + create_bucket_request: "CreateBucketRequest" = proto.Field( proto.MESSAGE, number=4, - oneof='request', - message='CreateBucketRequest', + oneof="request", + message="CreateBucketRequest", ) - update_bucket_request: 'UpdateBucketRequest' = proto.Field( + update_bucket_request: "UpdateBucketRequest" = proto.Field( proto.MESSAGE, number=5, - oneof='request', - message='UpdateBucketRequest', + oneof="request", + message="UpdateBucketRequest", ) @@ -2380,22 +2383,22 @@ class LinkMetadata(proto.Message): number=2, message=timestamp_pb2.Timestamp, ) - state: 'OperationState' = proto.Field( + state: "OperationState" = proto.Field( proto.ENUM, number=3, - enum='OperationState', + enum="OperationState", ) - create_link_request: 'CreateLinkRequest' = proto.Field( + create_link_request: "CreateLinkRequest" = proto.Field( proto.MESSAGE, number=4, - oneof='request', - message='CreateLinkRequest', + oneof="request", + message="CreateLinkRequest", ) - delete_link_request: 'DeleteLinkRequest' = proto.Field( 
+ delete_link_request: "DeleteLinkRequest" = proto.Field( proto.MESSAGE, number=5, - oneof='request', - message='DeleteLinkRequest', + oneof="request", + message="DeleteLinkRequest", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_metrics.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_metrics.py index 4fb515d770d4..3fe0393c0e83 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_metrics.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_metrics.py @@ -17,23 +17,21 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - import google.api.distribution_pb2 as distribution_pb2 # type: ignore import google.api.metric_pb2 as metric_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore - +import proto # type: ignore __protobuf__ = proto.module( - package='google.logging.v2', + package="google.logging.v2", manifest={ - 'LogMetric', - 'ListLogMetricsRequest', - 'ListLogMetricsResponse', - 'GetLogMetricRequest', - 'CreateLogMetricRequest', - 'UpdateLogMetricRequest', - 'DeleteLogMetricRequest', + "LogMetric", + "ListLogMetricsRequest", + "ListLogMetricsResponse", + "GetLogMetricRequest", + "CreateLogMetricRequest", + "UpdateLogMetricRequest", + "DeleteLogMetricRequest", }, ) @@ -180,6 +178,7 @@ class LogMetric(proto.Message): updated this metric. The v2 format is used by default and cannot be changed. """ + class ApiVersion(proto.Enum): r"""Logging API version. @@ -189,6 +188,7 @@ class ApiVersion(proto.Enum): V1 (1): Logging API v1. 
""" + V2 = 0 V1 = 1 @@ -302,10 +302,10 @@ class ListLogMetricsResponse(proto.Message): def raw_page(self): return self - metrics: MutableSequence['LogMetric'] = proto.RepeatedField( + metrics: MutableSequence["LogMetric"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='LogMetric', + message="LogMetric", ) next_page_token: str = proto.Field( proto.STRING, @@ -353,10 +353,10 @@ class CreateLogMetricRequest(proto.Message): proto.STRING, number=1, ) - metric: 'LogMetric' = proto.Field( + metric: "LogMetric" = proto.Field( proto.MESSAGE, number=2, - message='LogMetric', + message="LogMetric", ) @@ -383,10 +383,10 @@ class UpdateLogMetricRequest(proto.Message): proto.STRING, number=1, ) - metric: 'LogMetric' = proto.Field( + metric: "LogMetric" = proto.Field( proto.MESSAGE, number=2, - message='LogMetric', + message="LogMetric", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py index 491848c947bd..52a241a3d4bb 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py @@ -17,9 +17,8 @@ import pathlib import re import shutil - -from typing import Dict, List import warnings +from typing import Dict, List import nox @@ -154,7 +153,8 @@ def lint(session): # 2. Check formatting session.run( - "ruff", "format", + "ruff", + "format", "--check", f"--target-version=py{ALL_PYTHON[0].replace('.', '')}", "--line-length=88", @@ -167,12 +167,15 @@ def lint(session): @nox.session(python=DEFAULT_PYTHON_VERSION) def blacken(session): """(Deprecated) Legacy session. Please use 'nox -s format'.""" - session.log("WARNING: The 'blacken' session is deprecated and will be removed in a future release. 
Please use 'nox -s format' in the future.") + session.log( + "WARNING: The 'blacken' session is deprecated and will be removed in a future release. Please use 'nox -s format' in the future." + ) # Just run the ruff formatter (keeping legacy behavior of only formatting, not sorting imports) session.install(RUFF_VERSION) session.run( - "ruff", "format", + "ruff", + "format", f"--target-version=py{ALL_PYTHON[0].replace('.', '')}", "--line-length=88", *LINT_PATHS, @@ -191,8 +194,10 @@ def format(session): # check --select I: Enables strict import sorting # --fix: Applies the changes automatically session.run( - "ruff", "check", - "--select", "I", + "ruff", + "check", + "--select", + "I", "--fix", f"--target-version=py{ALL_PYTHON[0].replace('.', '')}", "--line-length=88", # Standard Black line length @@ -201,7 +206,8 @@ def format(session): # 3. Run Ruff to format code session.run( - "ruff", "format", + "ruff", + "format", f"--target-version=py{ALL_PYTHON[0].replace('.', '')}", "--line-length=88", # Standard Black line length *LINT_PATHS, @@ -386,8 +392,10 @@ def docs(session): "sphinx-build", "-T", # show full traceback on exception "-N", # no colors - "-b", "html", # builder - "-d", os.path.join("docs", "_build", "doctrees", ""), # cache directory + "-b", + "html", # builder + "-d", + os.path.join("docs", "_build", "doctrees", ""), # cache directory # paths to build: os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async_internal.py index 694c55cbeb01..d14350d45d34 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async_internal.py @@ -54,4 +54,5 @@ async def sample_copy_log_entries(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_CopyLogEntries_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync_internal.py index 18bd5e92ec18..168f8ee74114 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync_internal.py @@ -54,4 +54,5 @@ def sample_copy_log_entries(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_CopyLogEntries_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py index fb323dcf6c9e..9ebcaee1a1a0 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py @@ -50,4 +50,5 @@ async def sample_create_bucket(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_CreateBucket_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py index 7761a294851c..b2c6f11ab8f1 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py @@ -54,4 +54,5 @@ async def sample_create_bucket_async(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_CreateBucketAsync_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py index 4b137facd3c3..c35d7b814e7e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py @@ -54,4 +54,5 @@ def sample_create_bucket_async(): # Handle the response print(response) + 
# [END logging_v2_generated_ConfigServiceV2_CreateBucketAsync_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py index 60feee2dc575..8b73c024b4ff 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py @@ -50,4 +50,5 @@ def sample_create_bucket(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_CreateBucket_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async_internal.py index 5e91a182dce4..5aaf3674708b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async_internal.py @@ -54,4 +54,5 @@ async def sample_create_exclusion(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_CreateExclusion_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync_internal.py 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync_internal.py index a01f0852d211..3821b1c8968c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync_internal.py @@ -54,4 +54,5 @@ def sample_create_exclusion(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_CreateExclusion_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async_internal.py index bc2214e78839..dd16afc8f7ab 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async_internal.py @@ -54,4 +54,5 @@ async def sample_create_link(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_CreateLink_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync_internal.py index eb17f0d59d52..01ee988b54a3 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync_internal.py @@ -54,4 +54,5 @@ def sample_create_link(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_CreateLink_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async_internal.py index a60e9c5ad231..0a18b6c26de1 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async_internal.py @@ -54,4 +54,5 @@ async def sample_create_sink(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_CreateSink_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync_internal.py index 191ba174f415..69e0c40e10c0 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync_internal.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync_internal.py @@ -54,4 +54,5 @@ def sample_create_sink(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_CreateSink_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async_internal.py index 683be2c311d4..46d6f58b8076 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async_internal.py @@ -50,4 +50,5 @@ async def sample_create_view(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_CreateView_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync_internal.py index 86a7a77e2f14..d711e4a8d018 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync_internal.py @@ -50,4 +50,5 @@ def sample_create_view(): # Handle the response print(response) + # 
[END logging_v2_generated_ConfigServiceV2_CreateView_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async_internal.py index c3cfea93c773..9e72ca00db76 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async_internal.py @@ -53,4 +53,5 @@ async def sample_delete_link(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_DeleteLink_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync_internal.py index 6921f5b35d58..4fc76ed3923c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync_internal.py @@ -53,4 +53,5 @@ def sample_delete_link(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_DeleteLink_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py index 54af8efd76bb..61fe3182f933 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py @@ -49,4 +49,5 @@ async def sample_get_bucket(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_GetBucket_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py index 9a4020018c62..06bf7be8521c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py @@ -49,4 +49,5 @@ def sample_get_bucket(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_GetBucket_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async_internal.py index ec8b0f5cdaca..78b3ef076da7 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async_internal.py @@ -49,4 +49,5 @@ async def sample_get_cmek_settings(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_GetCmekSettings_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync_internal.py index a7a498879b34..4c6c64e683c5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync_internal.py @@ -49,4 +49,5 @@ def sample_get_cmek_settings(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_GetCmekSettings_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async_internal.py index cd23fc5e3b10..d9fa7db83286 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async_internal.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async_internal.py @@ -49,4 +49,5 @@ async def sample_get_exclusion(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_GetExclusion_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync_internal.py index e12a5a4951eb..d113da10c3a0 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync_internal.py @@ -49,4 +49,5 @@ def sample_get_exclusion(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_GetExclusion_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async_internal.py index 7b65e4a65094..aad97aa37270 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async_internal.py @@ -49,4 +49,5 @@ async def sample_get_link(): # Handle the response 
print(response) + # [END logging_v2_generated_ConfigServiceV2_GetLink_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync_internal.py index 0530e845acae..60ca090b3bea 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync_internal.py @@ -49,4 +49,5 @@ def sample_get_link(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_GetLink_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async_internal.py index 09f1cc42a447..476dd559973a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async_internal.py @@ -49,4 +49,5 @@ async def sample_get_settings(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_GetSettings_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync_internal.py 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync_internal.py index e0409e255d33..117ddb2496ce 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync_internal.py @@ -49,4 +49,5 @@ def sample_get_settings(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_GetSettings_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async_internal.py index 53643669d446..52f2cd2f2e01 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async_internal.py @@ -49,4 +49,5 @@ async def sample_get_sink(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_GetSink_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync_internal.py index 72be15ec0d5b..3ed57ee37c16 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync_internal.py @@ -49,4 +49,5 @@ def sample_get_sink(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_GetSink_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async_internal.py index f34ca9b4ca54..c50832284cb4 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async_internal.py @@ -49,4 +49,5 @@ async def sample_get_view(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_GetView_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync_internal.py index a2249d9d0cbd..7c07b9a51ac7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync_internal.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync_internal.py @@ -49,4 +49,5 @@ def sample_get_view(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_GetView_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py index ee91e82647b4..876ec97ef335 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py @@ -50,4 +50,5 @@ async def sample_list_buckets(): async for response in page_result: print(response) + # [END logging_v2_generated_ConfigServiceV2_ListBuckets_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py index 306a29952f8b..d90674dc8fb5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py @@ -50,4 +50,5 @@ def sample_list_buckets(): for response in page_result: print(response) + # [END logging_v2_generated_ConfigServiceV2_ListBuckets_sync] 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async_internal.py index 86146b6a70f8..92fcd5d587f4 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async_internal.py @@ -50,4 +50,5 @@ async def sample_list_exclusions(): async for response in page_result: print(response) + # [END logging_v2_generated_ConfigServiceV2_ListExclusions_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync_internal.py index f8a73223ac01..ea9c711dedb2 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync_internal.py @@ -50,4 +50,5 @@ def sample_list_exclusions(): for response in page_result: print(response) + # [END logging_v2_generated_ConfigServiceV2_ListExclusions_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async_internal.py 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async_internal.py index 6f1bd5b9ed90..6d911312c9e2 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async_internal.py @@ -50,4 +50,5 @@ async def sample_list_links(): async for response in page_result: print(response) + # [END logging_v2_generated_ConfigServiceV2_ListLinks_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync_internal.py index ed645aefa7b2..60fd122127eb 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync_internal.py @@ -50,4 +50,5 @@ def sample_list_links(): for response in page_result: print(response) + # [END logging_v2_generated_ConfigServiceV2_ListLinks_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async_internal.py index 7907bdee177c..2bb182fb5b4e 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async_internal.py @@ -50,4 +50,5 @@ async def sample_list_sinks(): async for response in page_result: print(response) + # [END logging_v2_generated_ConfigServiceV2_ListSinks_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync_internal.py index a3a8c24551df..035e9d5d7dd7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync_internal.py @@ -50,4 +50,5 @@ def sample_list_sinks(): for response in page_result: print(response) + # [END logging_v2_generated_ConfigServiceV2_ListSinks_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async_internal.py index e37a37fc4c2a..16ceebecf24a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async_internal.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async_internal.py @@ -50,4 +50,5 @@ async def sample_list_views(): async for response in page_result: print(response) + # [END logging_v2_generated_ConfigServiceV2_ListViews_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync_internal.py index af7165fcd9c3..307b844fbbc4 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync_internal.py @@ -50,4 +50,5 @@ def sample_list_views(): for response in page_result: print(response) + # [END logging_v2_generated_ConfigServiceV2_ListViews_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py index e1f67c09803a..fd0a768fcd7e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py @@ -49,4 +49,5 @@ async def sample_update_bucket(): # Handle the response print(response) + # [END 
logging_v2_generated_ConfigServiceV2_UpdateBucket_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py index 452fa73bc39a..d285da9d86da 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py @@ -53,4 +53,5 @@ async def sample_update_bucket_async(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py index e6782e0affa5..dc712197023a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py @@ -53,4 +53,5 @@ def sample_update_bucket_async(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py index 512c1bc09762..280d7fa41687 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py @@ -49,4 +49,5 @@ def sample_update_bucket(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_UpdateBucket_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async_internal.py index ee425c0b293d..c22232c245b7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async_internal.py @@ -49,4 +49,5 @@ async def sample_update_cmek_settings(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync_internal.py index d65874e81c2b..84fec964b5c0 100755 
--- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync_internal.py @@ -49,4 +49,5 @@ def sample_update_cmek_settings(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async_internal.py index e5ecd49b246f..14cd9d43beb1 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async_internal.py @@ -54,4 +54,5 @@ async def sample_update_exclusion(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_UpdateExclusion_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync_internal.py index 35e8e57f64d1..75ffef9952a8 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync_internal.py 
+++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync_internal.py @@ -54,4 +54,5 @@ def sample_update_exclusion(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_UpdateExclusion_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async_internal.py index 85b2289de5a7..086a61771602 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async_internal.py @@ -49,4 +49,5 @@ async def sample_update_settings(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_UpdateSettings_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync_internal.py index 94d7c529145f..04472956586e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync_internal.py @@ -49,4 +49,5 @@ def 
sample_update_settings(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_UpdateSettings_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async_internal.py index 30aba394e4f8..31db442908be 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async_internal.py @@ -54,4 +54,5 @@ async def sample_update_sink(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_UpdateSink_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync_internal.py index dd0bc17a9152..666d2f6395f3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync_internal.py @@ -54,4 +54,5 @@ def sample_update_sink(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_UpdateSink_sync_internal] diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async_internal.py index 7b0143d20812..b203aee0d304 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async_internal.py @@ -49,4 +49,5 @@ async def sample_update_view(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_UpdateView_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync_internal.py index d9701082d680..385bac012ddf 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync_internal.py @@ -49,4 +49,5 @@ def sample_update_view(): # Handle the response print(response) + # [END logging_v2_generated_ConfigServiceV2_UpdateView_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py index c421061799d6..279dd4e70cf3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py @@ -40,7 +40,7 @@ async def sample_list_log_entries(): # Initialize request argument(s) request = logging_v2.ListLogEntriesRequest( - resource_names=['resource_names_value1', 'resource_names_value2'], + resource_names=["resource_names_value1", "resource_names_value2"], ) # Make the request @@ -50,4 +50,5 @@ async def sample_list_log_entries(): async for response in page_result: print(response) + # [END logging_v2_generated_LoggingServiceV2_ListLogEntries_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py index 06ca2f113fd9..2afed02b2f85 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py @@ -40,7 +40,7 @@ def sample_list_log_entries(): # Initialize request argument(s) request = logging_v2.ListLogEntriesRequest( - resource_names=['resource_names_value1', 'resource_names_value2'], + resource_names=["resource_names_value1", "resource_names_value2"], ) # Make the request @@ -50,4 
+50,5 @@ def sample_list_log_entries(): for response in page_result: print(response) + # [END logging_v2_generated_LoggingServiceV2_ListLogEntries_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py index d33584db700d..abe4a9edb53b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py @@ -50,4 +50,5 @@ async def sample_list_logs(): async for response in page_result: print(response) + # [END logging_v2_generated_LoggingServiceV2_ListLogs_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py index 11ef98ea7222..829706dd4085 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py @@ -50,4 +50,5 @@ def sample_list_logs(): for response in page_result: print(response) + # [END logging_v2_generated_LoggingServiceV2_ListLogs_sync] diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py index 574a533dd739..fe8cfdf55ff7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py @@ -39,8 +39,7 @@ async def sample_list_monitored_resource_descriptors(): client = logging_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListMonitoredResourceDescriptorsRequest( - ) + request = logging_v2.ListMonitoredResourceDescriptorsRequest() # Make the request page_result = client.list_monitored_resource_descriptors(request=request) @@ -49,4 +48,5 @@ async def sample_list_monitored_resource_descriptors(): async for response in page_result: print(response) + # [END logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py index ca6b21d3b0e0..fc1587be7a21 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py @@ -39,8 +39,7 @@ def sample_list_monitored_resource_descriptors(): client = logging_v2.LoggingServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListMonitoredResourceDescriptorsRequest( - ) + request = logging_v2.ListMonitoredResourceDescriptorsRequest() # Make the request page_result = client.list_monitored_resource_descriptors(request=request) @@ -49,4 +48,5 @@ def sample_list_monitored_resource_descriptors(): for response in page_result: print(response) + # [END logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py index 646ec1476b63..ec9ef2f25aaa 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py @@ -40,7 +40,7 @@ async def sample_tail_log_entries(): # Initialize request argument(s) request = logging_v2.TailLogEntriesRequest( - resource_names=['resource_names_value1', 'resource_names_value2'], + resource_names=["resource_names_value1", "resource_names_value2"], ) # This method expects an iterator which contains @@ -60,4 +60,5 @@ def request_generator(): async for response in stream: print(response) + # [END logging_v2_generated_LoggingServiceV2_TailLogEntries_async] diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py index aab2284789e3..440ac83d6ee1 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py @@ -40,7 +40,7 @@ def sample_tail_log_entries(): # Initialize request argument(s) request = logging_v2.TailLogEntriesRequest( - resource_names=['resource_names_value1', 'resource_names_value2'], + resource_names=["resource_names_value1", "resource_names_value2"], ) # This method expects an iterator which contains @@ -60,4 +60,5 @@ def request_generator(): for response in stream: print(response) + # [END logging_v2_generated_LoggingServiceV2_TailLogEntries_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py index 75c30e24dd70..740fb8e5bf31 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py @@ -52,4 +52,5 @@ async def sample_write_log_entries(): # Handle the response print(response) + # [END 
logging_v2_generated_LoggingServiceV2_WriteLogEntries_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py index 78b2c8242f9d..06198642f49b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py @@ -52,4 +52,5 @@ def sample_write_log_entries(): # Handle the response print(response) + # [END logging_v2_generated_LoggingServiceV2_WriteLogEntries_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async_internal.py index 31a1abf2191a..bc965ed9dc6c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async_internal.py @@ -54,4 +54,5 @@ async def sample_create_log_metric(): # Handle the response print(response) + # [END logging_v2_generated_MetricsServiceV2_CreateLogMetric_async_internal] diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync_internal.py index c5ebd5a87288..31e568f6751a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync_internal.py @@ -54,4 +54,5 @@ def sample_create_log_metric(): # Handle the response print(response) + # [END logging_v2_generated_MetricsServiceV2_CreateLogMetric_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async_internal.py index 442c5d4ccc9f..2f6d3b7bc5a3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async_internal.py @@ -49,4 +49,5 @@ async def sample_get_log_metric(): # Handle the response print(response) + # [END logging_v2_generated_MetricsServiceV2_GetLogMetric_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync_internal.py 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync_internal.py index 571bdc3799b6..62f2ff712491 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync_internal.py @@ -49,4 +49,5 @@ def sample_get_log_metric(): # Handle the response print(response) + # [END logging_v2_generated_MetricsServiceV2_GetLogMetric_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async_internal.py index 82973e7b77a5..51bd15ad2765 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async_internal.py @@ -50,4 +50,5 @@ async def sample_list_log_metrics(): async for response in page_result: print(response) + # [END logging_v2_generated_MetricsServiceV2_ListLogMetrics_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync_internal.py index 
34392032135d..13fef8cb64ca 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync_internal.py @@ -50,4 +50,5 @@ def sample_list_log_metrics(): for response in page_result: print(response) + # [END logging_v2_generated_MetricsServiceV2_ListLogMetrics_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async_internal.py index 8d1e95542024..ed029f8a9e96 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async_internal.py @@ -54,4 +54,5 @@ async def sample_update_log_metric(): # Handle the response print(response) + # [END logging_v2_generated_MetricsServiceV2_UpdateLogMetric_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync_internal.py index a1a8e5f9f6ad..e59f01ef87ff 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync_internal.py @@ -54,4 +54,5 @@ def sample_update_log_metric(): # Handle the response print(response) + # [END logging_v2_generated_MetricsServiceV2_UpdateLogMetric_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py index d330baf0cfcd..12f029f82f06 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py @@ -17,20 +17,20 @@ import os import re -import setuptools # type: ignore +import setuptools # type: ignore package_root = os.path.abspath(os.path.dirname(__file__)) -name = 'google-cloud-logging' +name = "google-cloud-logging" description = "Google Cloud Logging API client library" version = None -with open(os.path.join(package_root, 'google/cloud/logging/gapic_version.py')) as fp: +with open(os.path.join(package_root, "google/cloud/logging/gapic_version.py")) as fp: version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) + assert len(version_candidates) == 1 version = version_candidates[0] if version[0] == "0": @@ -49,8 +49,7 @@ "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", "protobuf >= 4.25.8, < 8.0.0", ] -extras = { -} +extras = {} url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-logging" package_root = os.path.abspath(os.path.dirname(__file__)) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/__init__.py 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/__init__.py index 191773d5572d..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/__init__.py index 191773d5572d..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/__init__.py index 191773d5572d..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/__init__.py index 191773d5572d..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py index 71d63642bf6e..6ad798d515b9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -14,6 +14,7 @@ # limitations under the License. # import os + # try/except added for compatibility with python < 3.8 try: from unittest import mock @@ -21,48 +22,52 @@ except ImportError: # pragma: NO COVER import mock -import grpc -from grpc.experimental import aio import json import math +from collections.abc import Mapping, Sequence + +import grpc import pytest -from collections.abc import Sequence, Mapping from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule +from grpc.experimental import aio from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule try: from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER +except ImportError: # pragma: NO COVER HAS_GOOGLE_AUTH_AIO = False -from google.api_core import client_options +import google.api_core.operation_async as operation_async # type: ignore +import google.auth +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore +import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore +import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore +from google.api_core import ( + client_options, + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) from google.api_core import exceptions as core_exceptions -from 
google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operations_v1 -from google.api_core import path_template from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.cloud.logging_v2.services.config_service_v2 import BaseConfigServiceV2AsyncClient -from google.cloud.logging_v2.services.config_service_v2 import BaseConfigServiceV2Client -from google.cloud.logging_v2.services.config_service_v2 import pagers -from google.cloud.logging_v2.services.config_service_v2 import transports +from google.cloud.logging_v2.services.config_service_v2 import ( + BaseConfigServiceV2AsyncClient, + BaseConfigServiceV2Client, + pagers, + transports, +) from google.cloud.logging_v2.types import logging_config -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account -import google.api_core.operation_async as operation_async # type: ignore -import google.auth -import google.protobuf.empty_pb2 as empty_pb2 # type: ignore -import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore -import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore - - CRED_INFO_JSON = { "credential_source": "/path/to/file", @@ -77,9 +82,11 @@ async def mock_async_gen(data, chunk_size=1): chunk = data[i : i + chunk_size] yield chunk.encode("utf-8") + def client_cert_source_callback(): return b"cert bytes", b"key bytes" + # TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. # See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. 
def async_anonymous_credentials(): @@ -87,17 +94,27 @@ def async_anonymous_credentials(): return ga_credentials_async.AnonymousCredentials() return ga_credentials.AnonymousCredentials() + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + # If default endpoint template is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint template so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) def test__get_default_mtls_endpoint(): @@ -109,21 +126,52 @@ def test__get_default_mtls_endpoint(): custom_endpoint = ".custom" assert BaseConfigServiceV2Client._get_default_mtls_endpoint(None) is None - assert BaseConfigServiceV2Client._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert BaseConfigServiceV2Client._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert BaseConfigServiceV2Client._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert BaseConfigServiceV2Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert BaseConfigServiceV2Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi - assert BaseConfigServiceV2Client._get_default_mtls_endpoint(custom_endpoint) == 
custom_endpoint + assert ( + BaseConfigServiceV2Client._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + BaseConfigServiceV2Client._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + BaseConfigServiceV2Client._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + BaseConfigServiceV2Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + BaseConfigServiceV2Client._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + assert ( + BaseConfigServiceV2Client._get_default_mtls_endpoint(custom_endpoint) + == custom_endpoint + ) + def test__read_environment_variables(): - assert BaseConfigServiceV2Client._read_environment_variables() == (False, "auto", None) + assert BaseConfigServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert BaseConfigServiceV2Client._read_environment_variables() == (True, "auto", None) + assert BaseConfigServiceV2Client._read_environment_variables() == ( + True, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert BaseConfigServiceV2Client._read_environment_variables() == (False, "auto", None) + assert BaseConfigServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} @@ -137,27 +185,46 @@ def test__read_environment_variables(): ) else: assert BaseConfigServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert BaseConfigServiceV2Client._read_environment_variables() == ( False, - "auto", + "never", None, ) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert 
BaseConfigServiceV2Client._read_environment_variables() == (False, "never", None) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert BaseConfigServiceV2Client._read_environment_variables() == (False, "always", None) + assert BaseConfigServiceV2Client._read_environment_variables() == ( + False, + "always", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert BaseConfigServiceV2Client._read_environment_variables() == (False, "auto", None) + assert BaseConfigServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: BaseConfigServiceV2Client._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert BaseConfigServiceV2Client._read_environment_variables() == (False, "auto", "foo.com") + assert BaseConfigServiceV2Client._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) def test_use_client_cert_effective(): @@ -166,7 +233,9 @@ def test_use_client_cert_effective(): # the google-auth library supports automatic mTLS and determines that a # client certificate should be used. if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=True): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=True + ): assert BaseConfigServiceV2Client._use_client_cert_effective() is True # Test case 2: Test when `should_use_client_cert` returns False. 
@@ -174,7 +243,9 @@ def test_use_client_cert_effective(): # the google-auth library supports automatic mTLS and determines that a # client certificate should NOT be used. if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=False): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=False + ): assert BaseConfigServiceV2Client._use_client_cert_effective() is False # Test case 3: Test when `should_use_client_cert` is unavailable and the @@ -186,7 +257,9 @@ def test_use_client_cert_effective(): # Test case 4: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"} + ): assert BaseConfigServiceV2Client._use_client_cert_effective() is False # Test case 5: Test when `should_use_client_cert` is unavailable and the @@ -198,7 +271,9 @@ def test_use_client_cert_effective(): # Test case 6: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"} + ): assert BaseConfigServiceV2Client._use_client_cert_effective() is False # Test case 7: Test when `should_use_client_cert` is unavailable and the @@ -210,7 +285,9 @@ def test_use_client_cert_effective(): # Test case 8: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". 
if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"} + ): assert BaseConfigServiceV2Client._use_client_cert_effective() is False # Test case 9: Test when `should_use_client_cert` is unavailable and the @@ -225,83 +302,177 @@ def test_use_client_cert_effective(): # The method should raise a ValueError as the environment variable must be either # "true" or "false". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): with pytest.raises(ValueError): BaseConfigServiceV2Client._use_client_cert_effective() # Test case 11: Test when `should_use_client_cert` is available and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. # The method should return False as the environment variable is set to an invalid value. - if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): assert BaseConfigServiceV2Client._use_client_cert_effective() is False # Test case 12: Test when `should_use_client_cert` is available and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also, # the GOOGLE_API_CONFIG environment variable is unset. 
- if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): assert BaseConfigServiceV2Client._use_client_cert_effective() is False + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() assert BaseConfigServiceV2Client._get_client_cert_source(None, False) is None - assert BaseConfigServiceV2Client._get_client_cert_source(mock_provided_cert_source, False) is None - assert BaseConfigServiceV2Client._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + assert ( + BaseConfigServiceV2Client._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + BaseConfigServiceV2Client._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + BaseConfigServiceV2Client._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + BaseConfigServiceV2Client._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert BaseConfigServiceV2Client._get_client_cert_source(None, True) is mock_default_cert_source - assert BaseConfigServiceV2Client._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source -@mock.patch.object(BaseConfigServiceV2Client, 
"_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BaseConfigServiceV2Client)) -@mock.patch.object(BaseConfigServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BaseConfigServiceV2AsyncClient)) +@mock.patch.object( + BaseConfigServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BaseConfigServiceV2Client), +) +@mock.patch.object( + BaseConfigServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BaseConfigServiceV2AsyncClient), +) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() default_universe = BaseConfigServiceV2Client._DEFAULT_UNIVERSE - default_endpoint = BaseConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = BaseConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = BaseConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = BaseConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) - assert BaseConfigServiceV2Client._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert BaseConfigServiceV2Client._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == BaseConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT - assert BaseConfigServiceV2Client._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert BaseConfigServiceV2Client._get_api_endpoint(None, None, default_universe, "always") == BaseConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT - assert BaseConfigServiceV2Client._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == BaseConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT - assert BaseConfigServiceV2Client._get_api_endpoint(None, None, mock_universe, "never") == 
mock_endpoint - assert BaseConfigServiceV2Client._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + assert ( + BaseConfigServiceV2Client._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + BaseConfigServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == BaseConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + BaseConfigServiceV2Client._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + BaseConfigServiceV2Client._get_api_endpoint( + None, None, default_universe, "always" + ) + == BaseConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + BaseConfigServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == BaseConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + BaseConfigServiceV2Client._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + BaseConfigServiceV2Client._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) with pytest.raises(MutualTLSChannelError) as excinfo: - BaseConfigServiceV2Client._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + BaseConfigServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) def test__get_universe_domain(): client_universe_domain = "foo.com" universe_domain_env = "bar.com" - assert BaseConfigServiceV2Client._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert BaseConfigServiceV2Client._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert BaseConfigServiceV2Client._get_universe_domain(None, None) == BaseConfigServiceV2Client._DEFAULT_UNIVERSE + assert ( + BaseConfigServiceV2Client._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + BaseConfigServiceV2Client._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + BaseConfigServiceV2Client._get_universe_domain(None, None) + == BaseConfigServiceV2Client._DEFAULT_UNIVERSE + ) with pytest.raises(ValueError) as excinfo: BaseConfigServiceV2Client._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): cred = mock.Mock(["get_cred_info"]) cred.get_cred_info = mock.Mock(return_value=cred_info_json) @@ -317,7 +488,8 @@ def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_in else: assert error.details == ["foo"] -@pytest.mark.parametrize("error_code", [401,403,404,500]) + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): cred = mock.Mock([]) assert not hasattr(cred, "get_cred_info") @@ -330,59 +502,83 @@ def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): client._add_cred_info_for_auth_errors(error) assert error.details == [] -@pytest.mark.parametrize("client_class,transport_name", [ - (BaseConfigServiceV2Client, "grpc"), - (BaseConfigServiceV2AsyncClient, "grpc_asyncio"), -]) -def test_base_config_service_v2_client_from_service_account_info(client_class, transport_name): + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (BaseConfigServiceV2Client, "grpc"), + (BaseConfigServiceV2AsyncClient, "grpc_asyncio"), + ], +) +def test_base_config_service_v2_client_from_service_account_info( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + with 
mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: factory.return_value = creds info = {"valid": True} client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ( - 'logging.googleapis.com:443' - ) + assert client.transport._host == ("logging.googleapis.com:443") -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.ConfigServiceV2GrpcTransport, "grpc"), - (transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_base_config_service_v2_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ConfigServiceV2GrpcTransport, "grpc"), + (transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), + ], +) +def test_base_config_service_v2_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=False) use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class,transport_name", [ - (BaseConfigServiceV2Client, "grpc"), - (BaseConfigServiceV2AsyncClient, "grpc_asyncio"), -]) -def 
test_base_config_service_v2_client_from_service_account_file(client_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (BaseConfigServiceV2Client, "grpc"), + (BaseConfigServiceV2AsyncClient, "grpc_asyncio"), + ], +) +def test_base_config_service_v2_client_from_service_account_file( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ( - 'logging.googleapis.com:443' - ) + assert client.transport._host == ("logging.googleapis.com:443") def test_base_config_service_v2_client_get_transport_class(): @@ -396,29 +592,44 @@ def test_base_config_service_v2_client_get_transport_class(): assert transport == transports.ConfigServiceV2GrpcTransport -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (BaseConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), - (BaseConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(BaseConfigServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BaseConfigServiceV2Client)) 
-@mock.patch.object(BaseConfigServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BaseConfigServiceV2AsyncClient)) -def test_base_config_service_v2_client_client_options(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (BaseConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), + ( + BaseConfigServiceV2AsyncClient, + transports.ConfigServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + BaseConfigServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BaseConfigServiceV2Client), +) +@mock.patch.object( + BaseConfigServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BaseConfigServiceV2AsyncClient), +) +def test_base_config_service_v2_client_client_options( + client_class, transport_class, transport_name +): # Check that if channel is provided we won't create a new one. - with mock.patch.object(BaseConfigServiceV2Client, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) + with mock.patch.object(BaseConfigServiceV2Client, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. - with mock.patch.object(BaseConfigServiceV2Client, 'get_transport_class') as gtc: + with mock.patch.object(BaseConfigServiceV2Client, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() # Check the case api_endpoint is provided. 
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( @@ -436,13 +647,15 @@ def test_base_config_service_v2_client_client_options(client_class, transport_cl # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -454,7 +667,7 @@ def test_base_config_service_v2_client_client_options(client_class, transport_cl # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( @@ -474,17 +687,22 @@ def test_base_config_service_v2_client_client_options(client_class, transport_cl with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -493,46 +711,90 @@ def test_base_config_service_v2_client_client_options(client_class, transport_cl api_audience=None, ) # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, 
"__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, - api_audience="https://language.googleapis.com" + api_audience="https://language.googleapis.com", ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (BaseConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc", "true"), - (BaseConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "true"), - (BaseConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc", "false"), - (BaseConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(BaseConfigServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BaseConfigServiceV2Client)) -@mock.patch.object(BaseConfigServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BaseConfigServiceV2AsyncClient)) + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + BaseConfigServiceV2Client, + transports.ConfigServiceV2GrpcTransport, + "grpc", + "true", + ), + ( + BaseConfigServiceV2AsyncClient, + transports.ConfigServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + BaseConfigServiceV2Client, + transports.ConfigServiceV2GrpcTransport, + "grpc", + "false", + ), + ( + BaseConfigServiceV2AsyncClient, + transports.ConfigServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + 
BaseConfigServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BaseConfigServiceV2Client), +) +@mock.patch.object( + BaseConfigServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BaseConfigServiceV2AsyncClient), +) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_base_config_service_v2_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): +def test_base_config_service_v2_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. # Check the case client_cert_source is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -551,12 +813,22 @@ def 
test_base_config_service_v2_client_mtls_env_auto(client_class, transport_cla # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -577,15 +849,22 @@ def test_base_config_service_v2_client_mtls_env_auto(client_class, transport_cla ) # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -595,19 +874,31 @@ def test_base_config_service_v2_client_mtls_env_auto(client_class, transport_cla ) -@pytest.mark.parametrize("client_class", [ - BaseConfigServiceV2Client, BaseConfigServiceV2AsyncClient -]) -@mock.patch.object(BaseConfigServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(BaseConfigServiceV2Client)) -@mock.patch.object(BaseConfigServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BaseConfigServiceV2AsyncClient)) +@pytest.mark.parametrize( + "client_class", [BaseConfigServiceV2Client, BaseConfigServiceV2AsyncClient] +) +@mock.patch.object( + BaseConfigServiceV2Client, + "DEFAULT_ENDPOINT", + modify_default_endpoint(BaseConfigServiceV2Client), +) +@mock.patch.object( + BaseConfigServiceV2AsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(BaseConfigServiceV2AsyncClient), +) def test_base_config_service_v2_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source == mock_client_cert_source @@ -615,18 +906,25 @@ def test_base_config_service_v2_client_get_mtls_endpoint_and_cert_source(client_ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source is None # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): if hasattr(google.auth.transport.mtls, "should_use_client_cert"): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, ) api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( options @@ -663,23 +961,23 @@ def test_base_config_service_v2_client_get_mtls_endpoint_and_cert_source(client_ env = os.environ.copy() env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) with mock.patch.dict(os.environ, env, clear=True): - config_filename = "mock_certificate_config.json" - config_file_content = json.dumps(config_data) - m = mock.mock_open(read_data=config_file_content) - with mock.patch("builtins.open", m): - with mock.patch.dict( - os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} - ): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, - api_endpoint=mock_api_endpoint, - ) - api_endpoint, cert_source = ( - client_class.get_mtls_endpoint_and_cert_source(options) - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is expected_cert_source + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert 
api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). test_cases = [ @@ -710,23 +1008,23 @@ def test_base_config_service_v2_client_get_mtls_endpoint_and_cert_source(client_ env = os.environ.copy() env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") with mock.patch.dict(os.environ, env, clear=True): - config_filename = "mock_certificate_config.json" - config_file_content = json.dumps(config_data) - m = mock.mock_open(read_data=config_file_content) - with mock.patch("builtins.open", m): - with mock.patch.dict( - os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} - ): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, - api_endpoint=mock_api_endpoint, - ) - api_endpoint, cert_source = ( - client_class.get_mtls_endpoint_and_cert_source(options) - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is expected_cert_source + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): @@ -742,16 +1040,27 @@ def test_base_config_service_v2_client_get_mtls_endpoint_and_cert_source(client_ # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_ENDPOINT assert cert_source is None # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source() + ) assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source @@ -761,27 +1070,50 @@ def test_base_config_service_v2_client_get_mtls_endpoint_and_cert_source(client_ with pytest.raises(MutualTLSChannelError) as excinfo: client_class.get_mtls_endpoint_and_cert_source() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) -@pytest.mark.parametrize("client_class", [ - BaseConfigServiceV2Client, BaseConfigServiceV2AsyncClient -]) -@mock.patch.object(BaseConfigServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", 
modify_default_endpoint_template(BaseConfigServiceV2Client)) -@mock.patch.object(BaseConfigServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BaseConfigServiceV2AsyncClient)) + +@pytest.mark.parametrize( + "client_class", [BaseConfigServiceV2Client, BaseConfigServiceV2AsyncClient] +) +@mock.patch.object( + BaseConfigServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BaseConfigServiceV2Client), +) +@mock.patch.object( + BaseConfigServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BaseConfigServiceV2AsyncClient), +) def test_base_config_service_v2_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" default_universe = BaseConfigServiceV2Client._DEFAULT_UNIVERSE - default_endpoint = BaseConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = BaseConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = BaseConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = BaseConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", @@ -804,11 +1136,19 @@ def test_base_config_service_v2_client_client_api_endpoint(client_class): universe_exists = hasattr(options, "universe_domain") if universe_exists: options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the 
_DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. @@ -816,26 +1156,39 @@ def test_base_config_service_v2_client_client_api_endpoint(client_class): if hasattr(options, "universe_domain"): delattr(options, "universe_domain") with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) assert client.api_endpoint == default_endpoint -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (BaseConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), - (BaseConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_base_config_service_v2_client_client_options_scopes(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (BaseConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), + ( + BaseConfigServiceV2AsyncClient, + transports.ConfigServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_base_config_service_v2_client_client_options_scopes( + client_class, transport_class, transport_name +): # Check the case scopes are provided. 
options = client_options.ClientOptions( scopes=["1", "2"], ) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -844,23 +1197,39 @@ def test_base_config_service_v2_client_client_options_scopes(client_class, trans api_audience=None, ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (BaseConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc", grpc_helpers), - (BaseConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_base_config_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + BaseConfigServiceV2Client, + transports.ConfigServiceV2GrpcTransport, + "grpc", + grpc_helpers, + ), + ( + BaseConfigServiceV2AsyncClient, + transports.ConfigServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_base_config_service_v2_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -869,11 +1238,14 @@ def test_base_config_service_v2_client_client_options_credentials_file(client_cl api_audience=None, ) + def test_base_config_service_v2_client_client_options_from_dict(): - with mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2GrpcTransport.__init__') as grpc_transport: + with mock.patch( + "google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2GrpcTransport.__init__" + ) as grpc_transport: grpc_transport.return_value = None client = BaseConfigServiceV2Client( - client_options={'api_endpoint': 'squid.clam.whelk'} + client_options={"api_endpoint": "squid.clam.whelk"} ) grpc_transport.assert_called_once_with( credentials=None, @@ -888,23 +1260,38 @@ def test_base_config_service_v2_client_client_options_from_dict(): ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (BaseConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc", grpc_helpers), - (BaseConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_base_config_service_v2_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): 
+@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + BaseConfigServiceV2Client, + transports.ConfigServiceV2GrpcTransport, + "grpc", + grpc_helpers, + ), + ( + BaseConfigServiceV2AsyncClient, + transports.ConfigServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_base_config_service_v2_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -914,13 +1301,13 @@ def test_base_config_service_v2_client_create_channel_credentials_file(client_cl ) # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch.object(grpc_helpers, "create_channel") as create_channel, + ): creds = ga_credentials.AnonymousCredentials() file_creds = ga_credentials.AnonymousCredentials() load_creds.return_value = (file_creds, None) @@ -932,11 +1319,11 @@ def test_base_config_service_v2_client_create_channel_credentials_file(client_cl credentials_file=None, quota_project_id=None, default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', -), + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), scopes=None, default_host="logging.googleapis.com", ssl_credentials=None, @@ -947,11 +1334,14 @@ def test_base_config_service_v2_client_create_channel_credentials_file(client_cl ) -@pytest.mark.parametrize("request_type", [ - logging_config.ListBucketsRequest, - dict, -]) -def test_list_buckets(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.ListBucketsRequest, + dict, + ], +) +def test_list_buckets(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -962,12 +1352,10 @@ def test_list_buckets(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within 
the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListBucketsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.list_buckets(request) @@ -979,7 +1367,7 @@ def test_list_buckets(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListBucketsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_list_buckets_non_empty_request_with_auto_populated_field(): @@ -987,30 +1375,31 @@ def test_list_buckets_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.ListBucketsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_buckets(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListBucketsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) + def test_list_buckets_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1029,7 +1418,9 @@ def test_list_buckets_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.list_buckets] = mock_rpc request = {} client.list_buckets(request) @@ -1043,8 +1434,11 @@ def test_list_buckets_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_buckets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_buckets_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1058,12 +1452,17 @@ async def test_list_buckets_async_use_cached_wrapped_rpc(transport: str = "grpc_ wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_buckets in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_buckets + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - 
client._client._transport._wrapped_methods[client._client._transport.list_buckets] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_buckets + ] = mock_rpc request = {} await client.list_buckets(request) @@ -1077,8 +1476,11 @@ async def test_list_buckets_async_use_cached_wrapped_rpc(transport: str = "grpc_ assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_buckets_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListBucketsRequest): +async def test_list_buckets_async( + transport: str = "grpc_asyncio", request_type=logging_config.ListBucketsRequest +): client = BaseConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -1089,13 +1491,13 @@ async def test_list_buckets_async(transport: str = 'grpc_asyncio', request_type= request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListBucketsResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListBucketsResponse( + next_page_token="next_page_token_value", + ) + ) response = await client.list_buckets(request) # Establish that the underlying gRPC stub method was called. @@ -1106,13 +1508,14 @@ async def test_list_buckets_async(transport: str = 'grpc_asyncio', request_type= # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListBucketsAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_list_buckets_async_from_dict(): await test_list_buckets_async(request_type=dict) + def test_list_buckets_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1122,12 +1525,10 @@ def test_list_buckets_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.ListBucketsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: call.return_value = logging_config.ListBucketsResponse() client.list_buckets(request) @@ -1139,9 +1540,9 @@ def test_list_buckets_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1154,13 +1555,13 @@ async def test_list_buckets_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.ListBucketsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListBucketsResponse()) + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListBucketsResponse() + ) await client.list_buckets(request) # Establish that the underlying gRPC stub method was called. @@ -1171,9 +1572,9 @@ async def test_list_buckets_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_list_buckets_flattened(): @@ -1182,15 +1583,13 @@ def test_list_buckets_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListBucketsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.list_buckets( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1198,7 +1597,7 @@ def test_list_buckets_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -1212,9 +1611,10 @@ def test_list_buckets_flattened_error(): with pytest.raises(ValueError): client.list_buckets( logging_config.ListBucketsRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test_list_buckets_flattened_async(): client = BaseConfigServiceV2AsyncClient( @@ -1222,17 +1622,17 @@ async def test_list_buckets_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListBucketsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListBucketsResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListBucketsResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.list_buckets( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1240,9 +1640,10 @@ async def test_list_buckets_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test_list_buckets_flattened_error_async(): client = BaseConfigServiceV2AsyncClient( @@ -1254,7 +1655,7 @@ async def test_list_buckets_flattened_error_async(): with pytest.raises(ValueError): await client.list_buckets( logging_config.ListBucketsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -1265,9 +1666,7 @@ def test_list_buckets_pager(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListBucketsResponse( @@ -1276,17 +1675,17 @@ def test_list_buckets_pager(transport_name: str = "grpc"): logging_config.LogBucket(), logging_config.LogBucket(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListBucketsResponse( buckets=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListBucketsResponse( buckets=[ logging_config.LogBucket(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListBucketsResponse( buckets=[ @@ -1301,9 +1700,7 @@ def test_list_buckets_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_buckets(request={}, retry=retry, timeout=timeout) @@ -1313,8 +1710,9 @@ def test_list_buckets_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, logging_config.LogBucket) - for i in results) + assert all(isinstance(i, logging_config.LogBucket) for i in results) + + def test_list_buckets_pages(transport_name: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1322,9 +1720,7 @@ def test_list_buckets_pages(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListBucketsResponse( @@ -1333,17 +1729,17 @@ def test_list_buckets_pages(transport_name: str = "grpc"): logging_config.LogBucket(), logging_config.LogBucket(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListBucketsResponse( buckets=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListBucketsResponse( buckets=[ logging_config.LogBucket(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListBucketsResponse( buckets=[ @@ -1354,9 +1750,10 @@ def test_list_buckets_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.list_buckets(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_list_buckets_async_pager(): client = BaseConfigServiceV2AsyncClient( @@ -1365,8 +1762,8 @@ async def test_list_buckets_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_buckets), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_buckets), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListBucketsResponse( @@ -1375,17 +1772,17 @@ async def test_list_buckets_async_pager(): logging_config.LogBucket(), logging_config.LogBucket(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListBucketsResponse( buckets=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListBucketsResponse( buckets=[ logging_config.LogBucket(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListBucketsResponse( buckets=[ @@ -1395,15 +1792,16 @@ async def test_list_buckets_async_pager(): ), RuntimeError, ) - async_pager = await client.list_buckets(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.list_buckets( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, logging_config.LogBucket) - for i in responses) + assert all(isinstance(i, logging_config.LogBucket) for i in responses) @pytest.mark.asyncio @@ -1414,8 +1812,8 @@ async def test_list_buckets_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_buckets), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_buckets), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListBucketsResponse( @@ -1424,17 +1822,17 @@ async def test_list_buckets_async_pages(): logging_config.LogBucket(), logging_config.LogBucket(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListBucketsResponse( buckets=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListBucketsResponse( buckets=[ logging_config.LogBucket(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListBucketsResponse( buckets=[ @@ -1447,18 +1845,22 @@ async def test_list_buckets_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.list_buckets(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - logging_config.GetBucketRequest, - dict, -]) -def test_get_bucket(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetBucketRequest, + dict, + ], +) +def test_get_bucket(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1469,18 +1871,16 @@ def test_get_bucket(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = logging_config.LogBucket( - name='name_value', - description='description_value', + name="name_value", + description="description_value", retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, analytics_enabled=True, - restricted_fields=['restricted_fields_value'], + restricted_fields=["restricted_fields_value"], ) response = client.get_bucket(request) @@ -1492,13 +1892,13 @@ def test_get_bucket(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.description == "description_value" assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE assert response.analytics_enabled is True - assert response.restricted_fields == ['restricted_fields_value'] + assert response.restricted_fields == ["restricted_fields_value"] def test_get_bucket_non_empty_request_with_auto_populated_field(): @@ -1506,28 +1906,29 @@ def test_get_bucket_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.GetBucketRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_bucket), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_bucket(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetBucketRequest( - name='name_value', + name="name_value", ) + def test_get_bucket_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1546,7 +1947,9 @@ def test_get_bucket_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.get_bucket] = mock_rpc request = {} client.get_bucket(request) @@ -1560,6 +1963,7 @@ def test_get_bucket_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_get_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1575,12 +1979,17 @@ async def test_get_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_as wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_bucket in client._client._transport._wrapped_methods + assert ( + client._client._transport.get_bucket + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_bucket] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.get_bucket + ] = mock_rpc request = {} 
await client.get_bucket(request) @@ -1594,8 +2003,11 @@ async def test_get_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_as assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetBucketRequest): +async def test_get_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetBucketRequest +): client = BaseConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -1606,19 +2018,19 @@ async def test_get_bucket_async(transport: str = 'grpc_asyncio', request_type=lo request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( - name='name_value', - description='description_value', - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - analytics_enabled=True, - restricted_fields=['restricted_fields_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=["restricted_fields_value"], + ) + ) response = await client.get_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -1629,19 +2041,20 @@ async def test_get_bucket_async(transport: str = 'grpc_asyncio', request_type=lo # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.description == "description_value" assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE assert response.analytics_enabled is True - assert response.restricted_fields == ['restricted_fields_value'] + assert response.restricted_fields == ["restricted_fields_value"] @pytest.mark.asyncio async def test_get_bucket_async_from_dict(): await test_get_bucket_async(request_type=dict) + def test_get_bucket_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1651,12 +2064,10 @@ def test_get_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: call.return_value = logging_config.LogBucket() client.get_bucket(request) @@ -1668,9 +2079,9 @@ def test_get_bucket_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1683,13 +2094,13 @@ async def test_get_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_bucket), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket()) + with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket() + ) await client.get_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -1700,16 +2111,19 @@ async def test_get_bucket_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - logging_config.CreateBucketRequest, - dict, -]) -def test_create_bucket_async(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.CreateBucketRequest, + dict, + ], +) +def test_create_bucket_async(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1721,10 +2135,10 @@ def test_create_bucket_async(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_bucket_async), - '__call__') as call: + type(client.transport.create_bucket_async), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.create_bucket_async(request) # Establish that the underlying gRPC stub method was called. @@ -1742,30 +2156,33 @@ def test_create_bucket_async_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. 
client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.CreateBucketRequest( - parent='parent_value', - bucket_id='bucket_id_value', + parent="parent_value", + bucket_id="bucket_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_bucket_async), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.create_bucket_async), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_bucket_async(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateBucketRequest( - parent='parent_value', - bucket_id='bucket_id_value', + parent="parent_value", + bucket_id="bucket_id_value", ) + def test_create_bucket_async_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1780,12 +2197,18 @@ def test_create_bucket_async_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_bucket_async in client._transport._wrapped_methods + assert ( + client._transport.create_bucket_async in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.create_bucket_async] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_bucket_async] = ( + mock_rpc + ) request = {} client.create_bucket_async(request) @@ -1803,8 +2226,11 @@ def test_create_bucket_async_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_bucket_async_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_create_bucket_async_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1818,12 +2244,17 @@ async def test_create_bucket_async_async_use_cached_wrapped_rpc(transport: str = wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.create_bucket_async in client._client._transport._wrapped_methods + assert ( + client._client._transport.create_bucket_async + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_bucket_async] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.create_bucket_async + ] = mock_rpc request = {} await client.create_bucket_async(request) @@ -1842,8 +2273,11 @@ async def test_create_bucket_async_async_use_cached_wrapped_rpc(transport: str = assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_bucket_async_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateBucketRequest): +async def 
test_create_bucket_async_async( + transport: str = "grpc_asyncio", request_type=logging_config.CreateBucketRequest +): client = BaseConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -1855,11 +2289,11 @@ async def test_create_bucket_async_async(transport: str = 'grpc_asyncio', reques # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_bucket_async), - '__call__') as call: + type(client.transport.create_bucket_async), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.create_bucket_async(request) @@ -1877,6 +2311,7 @@ async def test_create_bucket_async_async(transport: str = 'grpc_asyncio', reques async def test_create_bucket_async_async_from_dict(): await test_create_bucket_async_async(request_type=dict) + def test_create_bucket_async_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1886,13 +2321,13 @@ def test_create_bucket_async_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.CreateBucketRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_bucket_async), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + type(client.transport.create_bucket_async), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.create_bucket_async(request) # Establish that the underlying gRPC stub method was called. @@ -1903,9 +2338,9 @@ def test_create_bucket_async_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1918,13 +2353,15 @@ async def test_create_bucket_async_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.CreateBucketRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_bucket_async), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + type(client.transport.create_bucket_async), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.create_bucket_async(request) # Establish that the underlying gRPC stub method was called. @@ -1935,16 +2372,19 @@ async def test_create_bucket_async_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - logging_config.UpdateBucketRequest, - dict, -]) -def test_update_bucket_async(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateBucketRequest, + dict, + ], +) +def test_update_bucket_async(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1956,10 +2396,10 @@ def test_update_bucket_async(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_bucket_async), - '__call__') as call: + type(client.transport.update_bucket_async), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.update_bucket_async(request) # Establish that the underlying gRPC stub method was called. @@ -1977,28 +2417,31 @@ def test_update_bucket_async_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.UpdateBucketRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_bucket_async), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.update_bucket_async), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.update_bucket_async(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateBucketRequest( - name='name_value', + name="name_value", ) + def test_update_bucket_async_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2013,12 +2456,18 @@ def test_update_bucket_async_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_bucket_async in client._transport._wrapped_methods + assert ( + client._transport.update_bucket_async in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_bucket_async] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.update_bucket_async] = ( + mock_rpc + ) request = {} client.update_bucket_async(request) @@ -2036,8 +2485,11 @@ def test_update_bucket_async_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_bucket_async_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_update_bucket_async_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2051,12 +2503,17 @@ async def test_update_bucket_async_async_use_cached_wrapped_rpc(transport: str = wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.update_bucket_async in client._client._transport._wrapped_methods + assert ( + client._client._transport.update_bucket_async + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_bucket_async] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.update_bucket_async + ] = mock_rpc request = {} await client.update_bucket_async(request) @@ -2075,8 +2532,11 @@ async def test_update_bucket_async_async_use_cached_wrapped_rpc(transport: str = assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_bucket_async_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateBucketRequest): +async def test_update_bucket_async_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest +): client = BaseConfigServiceV2AsyncClient( 
credentials=async_anonymous_credentials(), transport=transport, @@ -2088,11 +2548,11 @@ async def test_update_bucket_async_async(transport: str = 'grpc_asyncio', reques # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_bucket_async), - '__call__') as call: + type(client.transport.update_bucket_async), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.update_bucket_async(request) @@ -2110,6 +2570,7 @@ async def test_update_bucket_async_async(transport: str = 'grpc_asyncio', reques async def test_update_bucket_async_async_from_dict(): await test_update_bucket_async_async(request_type=dict) + def test_update_bucket_async_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2119,13 +2580,13 @@ def test_update_bucket_async_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_bucket_async), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + type(client.transport.update_bucket_async), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.update_bucket_async(request) # Establish that the underlying gRPC stub method was called. @@ -2136,9 +2597,9 @@ def test_update_bucket_async_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2151,13 +2612,15 @@ async def test_update_bucket_async_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_bucket_async), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + type(client.transport.update_bucket_async), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.update_bucket_async(request) # Establish that the underlying gRPC stub method was called. @@ -2168,16 +2631,19 @@ async def test_update_bucket_async_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - logging_config.CreateBucketRequest, - dict, -]) -def test_create_bucket(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.CreateBucketRequest, + dict, + ], +) +def test_create_bucket(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2188,18 +2654,16 @@ def test_create_bucket(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogBucket( - name='name_value', - description='description_value', + name="name_value", + description="description_value", retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, analytics_enabled=True, - restricted_fields=['restricted_fields_value'], + restricted_fields=["restricted_fields_value"], ) response = client.create_bucket(request) @@ -2211,13 +2675,13 @@ def test_create_bucket(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.description == "description_value" assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE assert response.analytics_enabled is True - assert response.restricted_fields == ['restricted_fields_value'] + assert response.restricted_fields == ["restricted_fields_value"] def test_create_bucket_non_empty_request_with_auto_populated_field(): @@ -2225,30 +2689,31 @@ def test_create_bucket_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
request = logging_config.CreateBucketRequest( - parent='parent_value', - bucket_id='bucket_id_value', + parent="parent_value", + bucket_id="bucket_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_bucket), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_bucket(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateBucketRequest( - parent='parent_value', - bucket_id='bucket_id_value', + parent="parent_value", + bucket_id="bucket_id_value", ) + def test_create_bucket_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2267,7 +2732,9 @@ def test_create_bucket_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.create_bucket] = mock_rpc request = {} client.create_bucket(request) @@ -2281,8 +2748,11 @@ def test_create_bucket_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_create_bucket_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2296,12 +2766,17 @@ async def test_create_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.create_bucket in client._client._transport._wrapped_methods + assert ( + client._client._transport.create_bucket + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_bucket] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.create_bucket + ] = mock_rpc request = {} await client.create_bucket(request) @@ -2315,8 +2790,11 @@ async def test_create_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateBucketRequest): +async def test_create_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.CreateBucketRequest +): client = BaseConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2327,19 +2805,19 @@ async def 
test_create_bucket_async(transport: str = 'grpc_asyncio', request_type request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( - name='name_value', - description='description_value', - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - analytics_enabled=True, - restricted_fields=['restricted_fields_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=["restricted_fields_value"], + ) + ) response = await client.create_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -2350,19 +2828,20 @@ async def test_create_bucket_async(transport: str = 'grpc_asyncio', request_type # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.description == "description_value" assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE assert response.analytics_enabled is True - assert response.restricted_fields == ['restricted_fields_value'] + assert response.restricted_fields == ["restricted_fields_value"] @pytest.mark.asyncio async def test_create_bucket_async_from_dict(): await test_create_bucket_async(request_type=dict) + def test_create_bucket_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2372,12 +2851,10 @@ def test_create_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.CreateBucketRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: call.return_value = logging_config.LogBucket() client.create_bucket(request) @@ -2389,9 +2866,9 @@ def test_create_bucket_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2404,13 +2881,13 @@ async def test_create_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.CreateBucketRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_bucket), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket()) + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket() + ) await client.create_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -2421,16 +2898,19 @@ async def test_create_bucket_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - logging_config.UpdateBucketRequest, - dict, -]) -def test_update_bucket(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateBucketRequest, + dict, + ], +) +def test_update_bucket(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2441,18 +2921,16 @@ def test_update_bucket(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = logging_config.LogBucket( - name='name_value', - description='description_value', + name="name_value", + description="description_value", retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, analytics_enabled=True, - restricted_fields=['restricted_fields_value'], + restricted_fields=["restricted_fields_value"], ) response = client.update_bucket(request) @@ -2464,13 +2942,13 @@ def test_update_bucket(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.description == "description_value" assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE assert response.analytics_enabled is True - assert response.restricted_fields == ['restricted_fields_value'] + assert response.restricted_fields == ["restricted_fields_value"] def test_update_bucket_non_empty_request_with_auto_populated_field(): @@ -2478,28 +2956,29 @@ def test_update_bucket_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.UpdateBucketRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_bucket), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_bucket(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateBucketRequest( - name='name_value', + name="name_value", ) + def test_update_bucket_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2518,7 +2997,9 @@ def test_update_bucket_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.update_bucket] = mock_rpc request = {} client.update_bucket(request) @@ -2532,8 +3013,11 @@ def test_update_bucket_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_update_bucket_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2547,12 +3031,17 @@ async def test_update_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.update_bucket in client._client._transport._wrapped_methods + assert ( + client._client._transport.update_bucket + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc 
= mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_bucket] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.update_bucket + ] = mock_rpc request = {} await client.update_bucket(request) @@ -2566,8 +3055,11 @@ async def test_update_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateBucketRequest): +async def test_update_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest +): client = BaseConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2578,19 +3070,19 @@ async def test_update_bucket_async(transport: str = 'grpc_asyncio', request_type request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( - name='name_value', - description='description_value', - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - analytics_enabled=True, - restricted_fields=['restricted_fields_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=["restricted_fields_value"], + ) + ) response = await client.update_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -2601,19 +3093,20 @@ async def test_update_bucket_async(transport: str = 'grpc_asyncio', request_type # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.description == "description_value" assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE assert response.analytics_enabled is True - assert response.restricted_fields == ['restricted_fields_value'] + assert response.restricted_fields == ["restricted_fields_value"] @pytest.mark.asyncio async def test_update_bucket_async_from_dict(): await test_update_bucket_async(request_type=dict) + def test_update_bucket_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2623,12 +3116,10 @@ def test_update_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: call.return_value = logging_config.LogBucket() client.update_bucket(request) @@ -2640,9 +3131,9 @@ def test_update_bucket_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2655,13 +3146,13 @@ async def test_update_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_bucket), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket()) + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket() + ) await client.update_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -2672,16 +3163,19 @@ async def test_update_bucket_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - logging_config.DeleteBucketRequest, - dict, -]) -def test_delete_bucket(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.DeleteBucketRequest, + dict, + ], +) +def test_delete_bucket(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2692,9 +3186,7 @@ def test_delete_bucket(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None response = client.delete_bucket(request) @@ -2714,28 +3206,29 @@ def test_delete_bucket_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.DeleteBucketRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_bucket), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_bucket(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteBucketRequest( - name='name_value', + name="name_value", ) + def test_delete_bucket_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2754,7 +3247,9 @@ def test_delete_bucket_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.delete_bucket] = mock_rpc request = {} client.delete_bucket(request) @@ -2768,8 +3263,11 @@ def test_delete_bucket_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_delete_bucket_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2783,12 +3281,17 @@ async def test_delete_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.delete_bucket in client._client._transport._wrapped_methods + assert ( + client._client._transport.delete_bucket + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc 
= mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_bucket] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.delete_bucket + ] = mock_rpc request = {} await client.delete_bucket(request) @@ -2802,8 +3305,11 @@ async def test_delete_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteBucketRequest): +async def test_delete_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.DeleteBucketRequest +): client = BaseConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2814,9 +3320,7 @@ async def test_delete_bucket_async(transport: str = 'grpc_asyncio', request_type request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.delete_bucket(request) @@ -2835,6 +3339,7 @@ async def test_delete_bucket_async(transport: str = 'grpc_asyncio', request_type async def test_delete_bucket_async_from_dict(): await test_delete_bucket_async(request_type=dict) + def test_delete_bucket_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2844,12 +3349,10 @@ def test_delete_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.DeleteBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: call.return_value = None client.delete_bucket(request) @@ -2861,9 +3364,9 @@ def test_delete_bucket_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2876,12 +3379,10 @@ async def test_delete_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.DeleteBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_bucket(request) @@ -2893,16 +3394,19 @@ async def test_delete_bucket_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - logging_config.UndeleteBucketRequest, - dict, -]) -def test_undelete_bucket(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UndeleteBucketRequest, + dict, + ], +) +def test_undelete_bucket(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2913,9 +3417,7 @@ def test_undelete_bucket(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.undelete_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None response = client.undelete_bucket(request) @@ -2935,28 +3437,29 @@ def test_undelete_bucket_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.UndeleteBucketRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.undelete_bucket), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.undelete_bucket(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UndeleteBucketRequest( - name='name_value', + name="name_value", ) + def test_undelete_bucket_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2975,7 +3478,9 @@ def test_undelete_bucket_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.undelete_bucket] = mock_rpc request = {} client.undelete_bucket(request) @@ -2989,8 +3494,11 @@ def test_undelete_bucket_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_undelete_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_undelete_bucket_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -3004,12 +3512,17 @@ async def test_undelete_bucket_async_use_cached_wrapped_rpc(transport: str = "gr wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.undelete_bucket in client._client._transport._wrapped_methods + assert ( + client._client._transport.undelete_bucket + in client._client._transport._wrapped_methods + ) # Replace cached wrapped 
function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.undelete_bucket] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.undelete_bucket + ] = mock_rpc request = {} await client.undelete_bucket(request) @@ -3023,8 +3536,11 @@ async def test_undelete_bucket_async_use_cached_wrapped_rpc(transport: str = "gr assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_undelete_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.UndeleteBucketRequest): +async def test_undelete_bucket_async( + transport: str = "grpc_asyncio", request_type=logging_config.UndeleteBucketRequest +): client = BaseConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -3035,9 +3551,7 @@ async def test_undelete_bucket_async(transport: str = 'grpc_asyncio', request_ty request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.undelete_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.undelete_bucket(request) @@ -3056,6 +3570,7 @@ async def test_undelete_bucket_async(transport: str = 'grpc_asyncio', request_ty async def test_undelete_bucket_async_from_dict(): await test_undelete_bucket_async(request_type=dict) + def test_undelete_bucket_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -3065,12 +3580,10 @@ def test_undelete_bucket_field_headers(): # a field header. Set these to a non-empty value. 
request = logging_config.UndeleteBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.undelete_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: call.return_value = None client.undelete_bucket(request) @@ -3082,9 +3595,9 @@ def test_undelete_bucket_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -3097,12 +3610,10 @@ async def test_undelete_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UndeleteBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.undelete_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.undelete_bucket(request) @@ -3114,16 +3625,19 @@ async def test_undelete_bucket_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - logging_config.ListViewsRequest, - dict, -]) -def test__list_views(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.ListViewsRequest, + dict, + ], +) +def test__list_views(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3134,12 +3648,10 @@ def test__list_views(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_views), - '__call__') as call: + with mock.patch.object(type(client.transport.list_views), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListViewsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client._list_views(request) @@ -3151,7 +3663,7 @@ def test__list_views(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListViewsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test__list_views_non_empty_request_with_auto_populated_field(): @@ -3159,30 +3671,31 @@ def test__list_views_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. 
client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.ListViewsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_views), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._list_views(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListViewsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) + def test__list_views_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3201,7 +3714,9 @@ def test__list_views_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.list_views] = mock_rpc request = {} client._list_views(request) @@ -3215,8 +3730,11 @@ def test__list_views_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test__list_views_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -3230,12 +3748,17 @@ async def test__list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_a wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_views in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_views + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_views] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_views + ] = mock_rpc request = {} await client._list_views(request) @@ -3249,8 +3772,11 @@ async def test__list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__list_views_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListViewsRequest): +async def test__list_views_async( + transport: str = "grpc_asyncio", request_type=logging_config.ListViewsRequest +): client = BaseConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -3261,13 +3787,13 @@ async def test__list_views_async(transport: str = 'grpc_asyncio', 
request_type=l request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_views), - '__call__') as call: + with mock.patch.object(type(client.transport.list_views), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListViewsResponse( + next_page_token="next_page_token_value", + ) + ) response = await client._list_views(request) # Establish that the underlying gRPC stub method was called. @@ -3278,13 +3804,14 @@ async def test__list_views_async(transport: str = 'grpc_asyncio', request_type=l # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListViewsAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test__list_views_async_from_dict(): await test__list_views_async(request_type=dict) + def test__list_views_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -3294,12 +3821,10 @@ def test__list_views_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.ListViewsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_views), - '__call__') as call: + with mock.patch.object(type(client.transport.list_views), "__call__") as call: call.return_value = logging_config.ListViewsResponse() client._list_views(request) @@ -3311,9 +3836,9 @@ def test__list_views_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -3326,13 +3851,13 @@ async def test__list_views_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.ListViewsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_views), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse()) + with mock.patch.object(type(client.transport.list_views), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListViewsResponse() + ) await client._list_views(request) # Establish that the underlying gRPC stub method was called. @@ -3343,9 +3868,9 @@ async def test__list_views_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test__list_views_flattened(): @@ -3354,15 +3879,13 @@ def test__list_views_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_views), - '__call__') as call: + with mock.patch.object(type(client.transport.list_views), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListViewsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client._list_views( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -3370,7 +3893,7 @@ def test__list_views_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -3384,9 +3907,10 @@ def test__list_views_flattened_error(): with pytest.raises(ValueError): client._list_views( logging_config.ListViewsRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test__list_views_flattened_async(): client = BaseConfigServiceV2AsyncClient( @@ -3394,17 +3918,17 @@ async def test__list_views_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_views), - '__call__') as call: + with mock.patch.object(type(client.transport.list_views), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListViewsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListViewsResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client._list_views( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -3412,9 +3936,10 @@ async def test__list_views_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test__list_views_flattened_error_async(): client = BaseConfigServiceV2AsyncClient( @@ -3426,7 +3951,7 @@ async def test__list_views_flattened_error_async(): with pytest.raises(ValueError): await client._list_views( logging_config.ListViewsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -3437,9 +3962,7 @@ def test__list_views_pager(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_views), - '__call__') as call: + with mock.patch.object(type(client.transport.list_views), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListViewsResponse( @@ -3448,17 +3971,17 @@ def test__list_views_pager(transport_name: str = "grpc"): logging_config.LogView(), logging_config.LogView(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListViewsResponse( views=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListViewsResponse( views=[ logging_config.LogView(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListViewsResponse( views=[ @@ -3473,9 +3996,7 @@ def test__list_views_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client._list_views(request={}, retry=retry, timeout=timeout) @@ -3485,8 +4006,9 @@ def test__list_views_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, logging_config.LogView) - for i in results) + assert all(isinstance(i, logging_config.LogView) for i in results) + + def test__list_views_pages(transport_name: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -3494,9 +4016,7 @@ def test__list_views_pages(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_views), - '__call__') as call: + with mock.patch.object(type(client.transport.list_views), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListViewsResponse( @@ -3505,17 +4025,17 @@ def test__list_views_pages(transport_name: str = "grpc"): logging_config.LogView(), logging_config.LogView(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListViewsResponse( views=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListViewsResponse( views=[ logging_config.LogView(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListViewsResponse( views=[ @@ -3526,9 +4046,10 @@ def test__list_views_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client._list_views(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test__list_views_async_pager(): client = BaseConfigServiceV2AsyncClient( @@ -3537,8 +4058,8 @@ async def test__list_views_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_views), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_views), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListViewsResponse( @@ -3547,17 +4068,17 @@ async def test__list_views_async_pager(): logging_config.LogView(), logging_config.LogView(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListViewsResponse( views=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListViewsResponse( views=[ logging_config.LogView(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListViewsResponse( views=[ @@ -3567,15 +4088,16 @@ async def test__list_views_async_pager(): ), RuntimeError, ) - async_pager = await client._list_views(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client._list_views( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, logging_config.LogView) - for i in responses) + assert all(isinstance(i, logging_config.LogView) for i in responses) @pytest.mark.asyncio @@ -3586,8 +4108,8 @@ async def test__list_views_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_views), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_views), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListViewsResponse( @@ -3596,17 +4118,17 @@ async def test__list_views_async_pages(): logging_config.LogView(), logging_config.LogView(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListViewsResponse( views=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListViewsResponse( views=[ logging_config.LogView(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListViewsResponse( views=[ @@ -3619,18 +4141,22 @@ async def test__list_views_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client._list_views(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - logging_config.GetViewRequest, - dict, -]) -def test__get_view(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetViewRequest, + dict, + ], +) +def test__get_view(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3641,14 +4167,12 @@ def test__get_view(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_view), - '__call__') as call: + with mock.patch.object(type(client.transport.get_view), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", ) response = client._get_view(request) @@ -3660,9 +4184,9 @@ def test__get_view(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" def test__get_view_non_empty_request_with_auto_populated_field(): @@ -3670,28 +4194,29 @@ def test__get_view_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.GetViewRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_view), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._get_view(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetViewRequest( - name='name_value', + name="name_value", ) + def test__get_view_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3710,7 +4235,9 @@ def test__get_view_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.get_view] = mock_rpc request = {} client._get_view(request) @@ -3724,6 +4251,7 @@ def test__get_view_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test__get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3739,12 +4267,17 @@ async def test__get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asy wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_view in client._client._transport._wrapped_methods + assert ( + client._client._transport.get_view + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_view] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.get_view + ] = mock_rpc request = {} await client._get_view(request) @@ -3758,8 +4291,11 @@ async def test__get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asy assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + 
@pytest.mark.asyncio -async def test__get_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetViewRequest): +async def test__get_view_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetViewRequest +): client = BaseConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -3770,15 +4306,15 @@ async def test__get_view_async(transport: str = 'grpc_asyncio', request_type=log request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_view), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', - )) + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", + ) + ) response = await client._get_view(request) # Establish that the underlying gRPC stub method was called. @@ -3789,15 +4325,16 @@ async def test__get_view_async(transport: str = 'grpc_asyncio', request_type=log # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogView) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" @pytest.mark.asyncio async def test__get_view_async_from_dict(): await test__get_view_async(request_type=dict) + def test__get_view_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -3807,12 +4344,10 @@ def test__get_view_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetViewRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_view), - '__call__') as call: + with mock.patch.object(type(client.transport.get_view), "__call__") as call: call.return_value = logging_config.LogView() client._get_view(request) @@ -3824,9 +4359,9 @@ def test__get_view_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -3839,13 +4374,13 @@ async def test__get_view_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetViewRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_view), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView()) + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView() + ) await client._get_view(request) # Establish that the underlying gRPC stub method was called. @@ -3856,16 +4391,19 @@ async def test__get_view_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - logging_config.CreateViewRequest, - dict, -]) -def test__create_view(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.CreateViewRequest, + dict, + ], +) +def test__create_view(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3876,14 +4414,12 @@ def test__create_view(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_view), - '__call__') as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", ) response = client._create_view(request) @@ -3895,9 +4431,9 @@ def test__create_view(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogView) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" def test__create_view_non_empty_request_with_auto_populated_field(): @@ -3905,30 +4441,31 @@ def test__create_view_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.CreateViewRequest( - parent='parent_value', - view_id='view_id_value', + parent="parent_value", + view_id="view_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_view), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.create_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._create_view(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateViewRequest( - parent='parent_value', - view_id='view_id_value', + parent="parent_value", + view_id="view_id_value", ) + def test__create_view_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3947,7 +4484,9 @@ def test__create_view_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.create_view] = mock_rpc request = {} client._create_view(request) @@ -3961,8 +4500,11 @@ def test__create_view_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__create_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test__create_view_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -3976,12 +4518,17 @@ async def test__create_view_async_use_cached_wrapped_rpc(transport: str = "grpc_ wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.create_view in client._client._transport._wrapped_methods + assert ( + client._client._transport.create_view + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_view] = 
mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.create_view + ] = mock_rpc request = {} await client._create_view(request) @@ -3995,8 +4542,11 @@ async def test__create_view_async_use_cached_wrapped_rpc(transport: str = "grpc_ assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__create_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateViewRequest): +async def test__create_view_async( + transport: str = "grpc_asyncio", request_type=logging_config.CreateViewRequest +): client = BaseConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -4007,15 +4557,15 @@ async def test__create_view_async(transport: str = 'grpc_asyncio', request_type= request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_view), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', - )) + with mock.patch.object(type(client.transport.create_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", + ) + ) response = await client._create_view(request) # Establish that the underlying gRPC stub method was called. @@ -4026,15 +4576,16 @@ async def test__create_view_async(transport: str = 'grpc_asyncio', request_type= # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogView) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" @pytest.mark.asyncio async def test__create_view_async_from_dict(): await test__create_view_async(request_type=dict) + def test__create_view_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -4044,12 +4595,10 @@ def test__create_view_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.CreateViewRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_view), - '__call__') as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: call.return_value = logging_config.LogView() client._create_view(request) @@ -4061,9 +4610,9 @@ def test__create_view_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -4076,13 +4625,13 @@ async def test__create_view_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.CreateViewRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_view), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView()) + with mock.patch.object(type(client.transport.create_view), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView() + ) await client._create_view(request) # Establish that the underlying gRPC stub method was called. @@ -4093,16 +4642,19 @@ async def test__create_view_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - logging_config.UpdateViewRequest, - dict, -]) -def test__update_view(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateViewRequest, + dict, + ], +) +def test__update_view(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4113,14 +4665,12 @@ def test__update_view(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_view), - '__call__') as call: + with mock.patch.object(type(client.transport.update_view), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", ) response = client._update_view(request) @@ -4132,9 +4682,9 @@ def test__update_view(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogView) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" def test__update_view_non_empty_request_with_auto_populated_field(): @@ -4142,28 +4692,29 @@ def test__update_view_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.UpdateViewRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_view), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._update_view(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateViewRequest( - name='name_value', + name="name_value", ) + def test__update_view_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -4182,7 +4733,9 @@ def test__update_view_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.update_view] = mock_rpc request = {} client._update_view(request) @@ -4196,8 +4749,11 @@ def test__update_view_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__update_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test__update_view_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -4211,12 +4767,17 @@ async def test__update_view_async_use_cached_wrapped_rpc(transport: str = "grpc_ wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.update_view in client._client._transport._wrapped_methods + assert ( + client._client._transport.update_view + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_view] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.update_view + ] = mock_rpc request = {} await client._update_view(request) @@ -4230,8 +4791,11 @@ async def test__update_view_async_use_cached_wrapped_rpc(transport: str = "grpc_ assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__update_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateViewRequest): +async def test__update_view_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateViewRequest +): client = BaseConfigServiceV2AsyncClient( 
credentials=async_anonymous_credentials(), transport=transport, @@ -4242,15 +4806,15 @@ async def test__update_view_async(transport: str = 'grpc_asyncio', request_type= request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_view), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', - )) + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", + ) + ) response = await client._update_view(request) # Establish that the underlying gRPC stub method was called. @@ -4261,15 +4825,16 @@ async def test__update_view_async(transport: str = 'grpc_asyncio', request_type= # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" @pytest.mark.asyncio async def test__update_view_async_from_dict(): await test__update_view_async(request_type=dict) + def test__update_view_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -4279,12 +4844,10 @@ def test__update_view_field_headers(): # a field header. Set these to a non-empty value. 
request = logging_config.UpdateViewRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_view), - '__call__') as call: + with mock.patch.object(type(client.transport.update_view), "__call__") as call: call.return_value = logging_config.LogView() client._update_view(request) @@ -4296,9 +4859,9 @@ def test__update_view_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -4311,13 +4874,13 @@ async def test__update_view_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateViewRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_view), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView()) + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView() + ) await client._update_view(request) # Establish that the underlying gRPC stub method was called. @@ -4328,16 +4891,19 @@ async def test__update_view_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - logging_config.DeleteViewRequest, - dict, -]) -def test__delete_view(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.DeleteViewRequest, + dict, + ], +) +def test__delete_view(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4348,9 +4914,7 @@ def test__delete_view(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_view), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None response = client._delete_view(request) @@ -4370,28 +4934,29 @@ def test__delete_view_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.DeleteViewRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_view), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._delete_view(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteViewRequest( - name='name_value', + name="name_value", ) + def test__delete_view_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -4410,7 +4975,9 @@ def test__delete_view_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.delete_view] = mock_rpc request = {} client._delete_view(request) @@ -4424,8 +4991,11 @@ def test__delete_view_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__delete_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test__delete_view_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -4439,12 +5009,17 @@ async def test__delete_view_async_use_cached_wrapped_rpc(transport: str = "grpc_ wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.delete_view in client._client._transport._wrapped_methods + assert ( + client._client._transport.delete_view + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = 
mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_view] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.delete_view + ] = mock_rpc request = {} await client._delete_view(request) @@ -4458,8 +5033,11 @@ async def test__delete_view_async_use_cached_wrapped_rpc(transport: str = "grpc_ assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__delete_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteViewRequest): +async def test__delete_view_async( + transport: str = "grpc_asyncio", request_type=logging_config.DeleteViewRequest +): client = BaseConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -4470,9 +5048,7 @@ async def test__delete_view_async(transport: str = 'grpc_asyncio', request_type= request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_view), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client._delete_view(request) @@ -4491,6 +5067,7 @@ async def test__delete_view_async(transport: str = 'grpc_asyncio', request_type= async def test__delete_view_async_from_dict(): await test__delete_view_async(request_type=dict) + def test__delete_view_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -4500,12 +5077,10 @@ def test__delete_view_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.DeleteViewRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_view), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: call.return_value = None client._delete_view(request) @@ -4517,9 +5092,9 @@ def test__delete_view_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -4532,12 +5107,10 @@ async def test__delete_view_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.DeleteViewRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_view), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client._delete_view(request) @@ -4549,16 +5122,19 @@ async def test__delete_view_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - logging_config.ListSinksRequest, - dict, -]) -def test__list_sinks(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.ListSinksRequest, + dict, + ], +) +def test__list_sinks(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4569,12 +5145,10 @@ def test__list_sinks(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_sinks), - '__call__') as call: + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListSinksResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client._list_sinks(request) @@ -4586,7 +5160,7 @@ def test__list_sinks(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSinksPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test__list_sinks_non_empty_request_with_auto_populated_field(): @@ -4594,30 +5168,31 @@ def test__list_sinks_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.ListSinksRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sinks), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._list_sinks(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListSinksRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) + def test__list_sinks_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -4636,7 +5211,9 @@ def test__list_sinks_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.list_sinks] = mock_rpc request = {} client._list_sinks(request) @@ -4650,8 +5227,11 @@ def test__list_sinks_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__list_sinks_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test__list_sinks_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -4665,12 +5245,17 @@ async def test__list_sinks_async_use_cached_wrapped_rpc(transport: str = "grpc_a wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_sinks in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_sinks + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_sinks] = 
mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_sinks + ] = mock_rpc request = {} await client._list_sinks(request) @@ -4684,8 +5269,11 @@ async def test__list_sinks_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__list_sinks_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListSinksRequest): +async def test__list_sinks_async( + transport: str = "grpc_asyncio", request_type=logging_config.ListSinksRequest +): client = BaseConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -4696,13 +5284,13 @@ async def test__list_sinks_async(transport: str = 'grpc_asyncio', request_type=l request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sinks), - '__call__') as call: + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListSinksResponse( + next_page_token="next_page_token_value", + ) + ) response = await client._list_sinks(request) # Establish that the underlying gRPC stub method was called. @@ -4713,13 +5301,14 @@ async def test__list_sinks_async(transport: str = 'grpc_asyncio', request_type=l # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListSinksAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test__list_sinks_async_from_dict(): await test__list_sinks_async(request_type=dict) + def test__list_sinks_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -4729,12 +5318,10 @@ def test__list_sinks_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.ListSinksRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sinks), - '__call__') as call: + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: call.return_value = logging_config.ListSinksResponse() client._list_sinks(request) @@ -4746,9 +5333,9 @@ def test__list_sinks_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -4761,13 +5348,13 @@ async def test__list_sinks_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.ListSinksRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_sinks), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse()) + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListSinksResponse() + ) await client._list_sinks(request) # Establish that the underlying gRPC stub method was called. @@ -4778,9 +5365,9 @@ async def test__list_sinks_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test__list_sinks_flattened(): @@ -4789,15 +5376,13 @@ def test__list_sinks_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sinks), - '__call__') as call: + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListSinksResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client._list_sinks( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -4805,7 +5390,7 @@ def test__list_sinks_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -4819,9 +5404,10 @@ def test__list_sinks_flattened_error(): with pytest.raises(ValueError): client._list_sinks( logging_config.ListSinksRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test__list_sinks_flattened_async(): client = BaseConfigServiceV2AsyncClient( @@ -4829,17 +5415,17 @@ async def test__list_sinks_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sinks), - '__call__') as call: + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListSinksResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListSinksResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client._list_sinks( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -4847,9 +5433,10 @@ async def test__list_sinks_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test__list_sinks_flattened_error_async(): client = BaseConfigServiceV2AsyncClient( @@ -4861,7 +5448,7 @@ async def test__list_sinks_flattened_error_async(): with pytest.raises(ValueError): await client._list_sinks( logging_config.ListSinksRequest(), - parent='parent_value', + parent="parent_value", ) @@ -4872,9 +5459,7 @@ def test__list_sinks_pager(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sinks), - '__call__') as call: + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListSinksResponse( @@ -4883,17 +5468,17 @@ def test__list_sinks_pager(transport_name: str = "grpc"): logging_config.LogSink(), logging_config.LogSink(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListSinksResponse( sinks=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListSinksResponse( sinks=[ logging_config.LogSink(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListSinksResponse( sinks=[ @@ -4908,9 +5493,7 @@ def test__list_sinks_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client._list_sinks(request={}, retry=retry, timeout=timeout) @@ -4920,8 +5503,9 @@ def test__list_sinks_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, logging_config.LogSink) - for i in results) + assert all(isinstance(i, logging_config.LogSink) for i in results) + + def test__list_sinks_pages(transport_name: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -4929,9 +5513,7 @@ def test__list_sinks_pages(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sinks), - '__call__') as call: + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListSinksResponse( @@ -4940,17 +5522,17 @@ def test__list_sinks_pages(transport_name: str = "grpc"): logging_config.LogSink(), logging_config.LogSink(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListSinksResponse( sinks=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListSinksResponse( sinks=[ logging_config.LogSink(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListSinksResponse( sinks=[ @@ -4961,9 +5543,10 @@ def test__list_sinks_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client._list_sinks(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test__list_sinks_async_pager(): client = BaseConfigServiceV2AsyncClient( @@ -4972,8 +5555,8 @@ async def test__list_sinks_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_sinks), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_sinks), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListSinksResponse( @@ -4982,17 +5565,17 @@ async def test__list_sinks_async_pager(): logging_config.LogSink(), logging_config.LogSink(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListSinksResponse( sinks=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListSinksResponse( sinks=[ logging_config.LogSink(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListSinksResponse( sinks=[ @@ -5002,15 +5585,16 @@ async def test__list_sinks_async_pager(): ), RuntimeError, ) - async_pager = await client._list_sinks(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client._list_sinks( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, logging_config.LogSink) - for i in responses) + assert all(isinstance(i, logging_config.LogSink) for i in responses) @pytest.mark.asyncio @@ -5021,8 +5605,8 @@ async def test__list_sinks_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_sinks), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_sinks), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListSinksResponse( @@ -5031,17 +5615,17 @@ async def test__list_sinks_async_pages(): logging_config.LogSink(), logging_config.LogSink(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListSinksResponse( sinks=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListSinksResponse( sinks=[ logging_config.LogSink(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListSinksResponse( sinks=[ @@ -5054,18 +5638,22 @@ async def test__list_sinks_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client._list_sinks(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - logging_config.GetSinkRequest, - dict, -]) -def test__get_sink(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetSinkRequest, + dict, + ], +) +def test__get_sink(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5076,18 +5664,16 @@ def test__get_sink(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", disabled=True, output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', + writer_identity="writer_identity_value", include_children=True, ) response = client._get_sink(request) @@ -5100,13 +5686,13 @@ def test__get_sink(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogSink) - assert response.name == 'name_value' - assert response.destination == 'destination_value' - assert response.filter == 'filter_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.description == "description_value" assert response.disabled is True assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == 'writer_identity_value' + assert response.writer_identity == "writer_identity_value" assert response.include_children is True @@ -5115,28 +5701,29 @@ def test__get_sink_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.GetSinkRequest( - sink_name='sink_name_value', + sink_name="sink_name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_sink), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._get_sink(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetSinkRequest( - sink_name='sink_name_value', + sink_name="sink_name_value", ) + def test__get_sink_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5155,7 +5742,9 @@ def test__get_sink_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.get_sink] = mock_rpc request = {} client._get_sink(request) @@ -5169,6 +5758,7 @@ def test__get_sink_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test__get_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5184,12 +5774,17 @@ async def test__get_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asy wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_sink in client._client._transport._wrapped_methods + assert ( + client._client._transport.get_sink + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_sink] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.get_sink + ] = mock_rpc request = {} await client._get_sink(request) @@ -5203,8 +5798,11 @@ async def test__get_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asy assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__get_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetSinkRequest): +async def test__get_sink_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetSinkRequest +): client = BaseConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -5215,20 +5813,20 @@ async def test__get_sink_async(transport: str = 'grpc_asyncio', request_type=log request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_sink), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', - include_children=True, - )) + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) + ) response = await client._get_sink(request) # Establish that the underlying gRPC stub method was called. @@ -5239,13 +5837,13 @@ async def test__get_sink_async(transport: str = 'grpc_asyncio', request_type=log # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogSink) - assert response.name == 'name_value' - assert response.destination == 'destination_value' - assert response.filter == 'filter_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.description == "description_value" assert response.disabled is True assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == 'writer_identity_value' + assert response.writer_identity == "writer_identity_value" assert response.include_children is True @@ -5253,6 +5851,7 @@ async def test__get_sink_async(transport: str = 'grpc_asyncio', request_type=log async def test__get_sink_async_from_dict(): await test__get_sink_async(request_type=dict) + def test__get_sink_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -5262,12 +5861,10 @@ def test__get_sink_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetSinkRequest() - request.sink_name = 'sink_name_value' + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: call.return_value = logging_config.LogSink() client._get_sink(request) @@ -5279,9 +5876,9 @@ def test__get_sink_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'sink_name=sink_name_value', - ) in kw['metadata'] + "x-goog-request-params", + "sink_name=sink_name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -5294,13 +5891,13 @@ async def test__get_sink_field_headers_async(): # a field header. Set these to a non-empty value. 
request = logging_config.GetSinkRequest() - request.sink_name = 'sink_name_value' + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_sink), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink() + ) await client._get_sink(request) # Establish that the underlying gRPC stub method was called. @@ -5311,9 +5908,9 @@ async def test__get_sink_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'sink_name=sink_name_value', - ) in kw['metadata'] + "x-goog-request-params", + "sink_name=sink_name_value", + ) in kw["metadata"] def test__get_sink_flattened(): @@ -5322,15 +5919,13 @@ def test__get_sink_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client._get_sink( - sink_name='sink_name_value', + sink_name="sink_name_value", ) # Establish that the underlying call was made with the expected @@ -5338,7 +5933,7 @@ def test__get_sink_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].sink_name - mock_val = 'sink_name_value' + mock_val = "sink_name_value" assert arg == mock_val @@ -5352,9 +5947,10 @@ def test__get_sink_flattened_error(): with pytest.raises(ValueError): client._get_sink( logging_config.GetSinkRequest(), - sink_name='sink_name_value', + sink_name="sink_name_value", ) + @pytest.mark.asyncio async def test__get_sink_flattened_async(): client = BaseConfigServiceV2AsyncClient( @@ -5362,17 +5958,17 @@ async def test__get_sink_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client._get_sink( - sink_name='sink_name_value', + sink_name="sink_name_value", ) # Establish that the underlying call was made with the expected @@ -5380,9 +5976,10 @@ async def test__get_sink_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].sink_name - mock_val = 'sink_name_value' + mock_val = "sink_name_value" assert arg == mock_val + @pytest.mark.asyncio async def test__get_sink_flattened_error_async(): client = BaseConfigServiceV2AsyncClient( @@ -5394,15 +5991,18 @@ async def test__get_sink_flattened_error_async(): with pytest.raises(ValueError): await client._get_sink( logging_config.GetSinkRequest(), - sink_name='sink_name_value', + sink_name="sink_name_value", ) -@pytest.mark.parametrize("request_type", [ - logging_config.CreateSinkRequest, - dict, -]) -def test__create_sink(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.CreateSinkRequest, + dict, + ], +) +def test__create_sink(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5413,18 +6013,16 @@ def test__create_sink(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", disabled=True, output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', + writer_identity="writer_identity_value", include_children=True, ) response = client._create_sink(request) @@ -5437,13 +6035,13 @@ def test__create_sink(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogSink) - assert response.name == 'name_value' - assert response.destination == 'destination_value' - assert response.filter == 'filter_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.description == "description_value" assert response.disabled is True assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == 'writer_identity_value' + assert response.writer_identity == "writer_identity_value" assert response.include_children is True @@ -5452,28 +6050,29 @@ def test__create_sink_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.CreateSinkRequest( - parent='parent_value', + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_sink), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._create_sink(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateSinkRequest( - parent='parent_value', + parent="parent_value", ) + def test__create_sink_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5492,7 +6091,9 @@ def test__create_sink_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.create_sink] = mock_rpc request = {} client._create_sink(request) @@ -5506,8 +6107,11 @@ def test__create_sink_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__create_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test__create_sink_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -5521,12 +6125,17 @@ async def test__create_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_ wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.create_sink in client._client._transport._wrapped_methods + assert ( + client._client._transport.create_sink + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_sink] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.create_sink + ] = mock_rpc request = {} await client._create_sink(request) @@ -5540,8 +6149,11 @@ async def test__create_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_ assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__create_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateSinkRequest): +async def test__create_sink_async( + transport: str = "grpc_asyncio", request_type=logging_config.CreateSinkRequest +): client = BaseConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -5552,20 +6164,20 @@ async def test__create_sink_async(transport: str = 
'grpc_asyncio', request_type= request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_sink), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', - include_children=True, - )) + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) + ) response = await client._create_sink(request) # Establish that the underlying gRPC stub method was called. @@ -5576,13 +6188,13 @@ async def test__create_sink_async(transport: str = 'grpc_asyncio', request_type= # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogSink) - assert response.name == 'name_value' - assert response.destination == 'destination_value' - assert response.filter == 'filter_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.description == "description_value" assert response.disabled is True assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == 'writer_identity_value' + assert response.writer_identity == "writer_identity_value" assert response.include_children is True @@ -5590,6 +6202,7 @@ async def test__create_sink_async(transport: str = 'grpc_asyncio', request_type= async def test__create_sink_async_from_dict(): await test__create_sink_async(request_type=dict) + def test__create_sink_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -5599,12 +6212,10 @@ def test__create_sink_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.CreateSinkRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: call.return_value = logging_config.LogSink() client._create_sink(request) @@ -5616,9 +6227,9 @@ def test__create_sink_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -5631,13 +6242,13 @@ async def test__create_sink_field_headers_async(): # a field header. 
Set these to a non-empty value. request = logging_config.CreateSinkRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_sink), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink() + ) await client._create_sink(request) # Establish that the underlying gRPC stub method was called. @@ -5648,9 +6259,9 @@ async def test__create_sink_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test__create_sink_flattened(): @@ -5659,16 +6270,14 @@ def test__create_sink_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client._create_sink( - parent='parent_value', - sink=logging_config.LogSink(name='name_value'), + parent="parent_value", + sink=logging_config.LogSink(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -5676,10 +6285,10 @@ def test__create_sink_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].sink - mock_val = logging_config.LogSink(name='name_value') + mock_val = logging_config.LogSink(name="name_value") assert arg == mock_val @@ -5693,10 +6302,11 @@ def test__create_sink_flattened_error(): with pytest.raises(ValueError): client._create_sink( logging_config.CreateSinkRequest(), - parent='parent_value', - sink=logging_config.LogSink(name='name_value'), + parent="parent_value", + sink=logging_config.LogSink(name="name_value"), ) + @pytest.mark.asyncio async def test__create_sink_flattened_async(): client = BaseConfigServiceV2AsyncClient( @@ -5704,18 +6314,18 @@ async def test__create_sink_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client._create_sink( - parent='parent_value', - sink=logging_config.LogSink(name='name_value'), + parent="parent_value", + sink=logging_config.LogSink(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -5723,12 +6333,13 @@ async def test__create_sink_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].sink - mock_val = logging_config.LogSink(name='name_value') + mock_val = logging_config.LogSink(name="name_value") assert arg == mock_val + @pytest.mark.asyncio async def test__create_sink_flattened_error_async(): client = BaseConfigServiceV2AsyncClient( @@ -5740,16 +6351,19 @@ async def test__create_sink_flattened_error_async(): with pytest.raises(ValueError): await client._create_sink( logging_config.CreateSinkRequest(), - parent='parent_value', - sink=logging_config.LogSink(name='name_value'), + parent="parent_value", + sink=logging_config.LogSink(name="name_value"), ) -@pytest.mark.parametrize("request_type", [ - logging_config.UpdateSinkRequest, - dict, -]) -def test__update_sink(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateSinkRequest, + dict, + ], +) +def test__update_sink(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5760,18 +6374,16 @@ def test__update_sink(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", disabled=True, output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', + writer_identity="writer_identity_value", include_children=True, ) response = client._update_sink(request) @@ -5784,13 +6396,13 @@ def test__update_sink(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogSink) - assert response.name == 'name_value' - assert response.destination == 'destination_value' - assert response.filter == 'filter_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.description == "description_value" assert response.disabled is True assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == 'writer_identity_value' + assert response.writer_identity == "writer_identity_value" assert response.include_children is True @@ -5799,28 +6411,29 @@ def test__update_sink_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.UpdateSinkRequest( - sink_name='sink_name_value', + sink_name="sink_name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_sink), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._update_sink(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateSinkRequest( - sink_name='sink_name_value', + sink_name="sink_name_value", ) + def test__update_sink_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5839,7 +6452,9 @@ def test__update_sink_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.update_sink] = mock_rpc request = {} client._update_sink(request) @@ -5853,8 +6468,11 @@ def test__update_sink_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__update_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test__update_sink_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -5868,12 +6486,17 @@ async def test__update_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_ wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.update_sink in client._client._transport._wrapped_methods + assert ( + client._client._transport.update_sink + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_sink] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.update_sink + ] = mock_rpc request = {} await client._update_sink(request) @@ -5887,8 +6510,11 @@ async def test__update_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_ assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__update_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateSinkRequest): +async def test__update_sink_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateSinkRequest +): client = BaseConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -5899,20 +6525,20 @@ async def test__update_sink_async(transport: str = 
'grpc_asyncio', request_type= request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_sink), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', - include_children=True, - )) + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) + ) response = await client._update_sink(request) # Establish that the underlying gRPC stub method was called. @@ -5923,13 +6549,13 @@ async def test__update_sink_async(transport: str = 'grpc_asyncio', request_type= # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogSink) - assert response.name == 'name_value' - assert response.destination == 'destination_value' - assert response.filter == 'filter_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.description == "description_value" assert response.disabled is True assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == 'writer_identity_value' + assert response.writer_identity == "writer_identity_value" assert response.include_children is True @@ -5937,6 +6563,7 @@ async def test__update_sink_async(transport: str = 'grpc_asyncio', request_type= async def test__update_sink_async_from_dict(): await test__update_sink_async(request_type=dict) + def test__update_sink_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -5946,12 +6573,10 @@ def test__update_sink_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateSinkRequest() - request.sink_name = 'sink_name_value' + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: call.return_value = logging_config.LogSink() client._update_sink(request) @@ -5963,9 +6588,9 @@ def test__update_sink_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'sink_name=sink_name_value', - ) in kw['metadata'] + "x-goog-request-params", + "sink_name=sink_name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -5978,13 +6603,13 @@ async def test__update_sink_field_headers_async(): # a field header. 
Set these to a non-empty value. request = logging_config.UpdateSinkRequest() - request.sink_name = 'sink_name_value' + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_sink), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink() + ) await client._update_sink(request) # Establish that the underlying gRPC stub method was called. @@ -5995,9 +6620,9 @@ async def test__update_sink_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'sink_name=sink_name_value', - ) in kw['metadata'] + "x-goog-request-params", + "sink_name=sink_name_value", + ) in kw["metadata"] def test__update_sink_flattened(): @@ -6006,17 +6631,15 @@ def test__update_sink_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client._update_sink( - sink_name='sink_name_value', - sink=logging_config.LogSink(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + sink_name="sink_name_value", + sink=logging_config.LogSink(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -6024,13 +6647,13 @@ def test__update_sink_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].sink_name - mock_val = 'sink_name_value' + mock_val = "sink_name_value" assert arg == mock_val arg = args[0].sink - mock_val = logging_config.LogSink(name='name_value') + mock_val = logging_config.LogSink(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @@ -6044,11 +6667,12 @@ def test__update_sink_flattened_error(): with pytest.raises(ValueError): client._update_sink( logging_config.UpdateSinkRequest(), - sink_name='sink_name_value', - sink=logging_config.LogSink(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + sink_name="sink_name_value", + sink=logging_config.LogSink(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + @pytest.mark.asyncio async def test__update_sink_flattened_async(): client = BaseConfigServiceV2AsyncClient( @@ -6056,19 +6680,19 @@ async def test__update_sink_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = logging_config.LogSink() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client._update_sink( - sink_name='sink_name_value', - sink=logging_config.LogSink(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + sink_name="sink_name_value", + sink=logging_config.LogSink(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -6076,15 +6700,16 @@ async def test__update_sink_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].sink_name - mock_val = 'sink_name_value' + mock_val = "sink_name_value" assert arg == mock_val arg = args[0].sink - mock_val = logging_config.LogSink(name='name_value') + mock_val = logging_config.LogSink(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val + @pytest.mark.asyncio async def test__update_sink_flattened_error_async(): client = BaseConfigServiceV2AsyncClient( @@ -6096,17 +6721,20 @@ async def test__update_sink_flattened_error_async(): with pytest.raises(ValueError): await client._update_sink( logging_config.UpdateSinkRequest(), - sink_name='sink_name_value', - sink=logging_config.LogSink(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + sink_name="sink_name_value", + sink=logging_config.LogSink(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -@pytest.mark.parametrize("request_type", [ - logging_config.DeleteSinkRequest, - dict, -]) -def test__delete_sink(request_type, 
transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.DeleteSinkRequest, + dict, + ], +) +def test__delete_sink(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6117,9 +6745,7 @@ def test__delete_sink(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None response = client._delete_sink(request) @@ -6139,28 +6765,29 @@ def test__delete_sink_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.DeleteSinkRequest( - sink_name='sink_name_value', + sink_name="sink_name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_sink), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._delete_sink(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteSinkRequest( - sink_name='sink_name_value', + sink_name="sink_name_value", ) + def test__delete_sink_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -6179,7 +6806,9 @@ def test__delete_sink_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.delete_sink] = mock_rpc request = {} client._delete_sink(request) @@ -6193,8 +6822,11 @@ def test__delete_sink_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__delete_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test__delete_sink_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -6208,12 +6840,17 @@ async def test__delete_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_ wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.delete_sink in client._client._transport._wrapped_methods + assert ( + client._client._transport.delete_sink + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_sink] = mock_rpc + 
client._client._transport._wrapped_methods[ + client._client._transport.delete_sink + ] = mock_rpc request = {} await client._delete_sink(request) @@ -6227,8 +6864,11 @@ async def test__delete_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_ assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__delete_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteSinkRequest): +async def test__delete_sink_async( + transport: str = "grpc_asyncio", request_type=logging_config.DeleteSinkRequest +): client = BaseConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -6239,9 +6879,7 @@ async def test__delete_sink_async(transport: str = 'grpc_asyncio', request_type= request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client._delete_sink(request) @@ -6260,6 +6898,7 @@ async def test__delete_sink_async(transport: str = 'grpc_asyncio', request_type= async def test__delete_sink_async_from_dict(): await test__delete_sink_async(request_type=dict) + def test__delete_sink_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -6269,12 +6908,10 @@ def test__delete_sink_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.DeleteSinkRequest() - request.sink_name = 'sink_name_value' + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: call.return_value = None client._delete_sink(request) @@ -6286,9 +6923,9 @@ def test__delete_sink_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'sink_name=sink_name_value', - ) in kw['metadata'] + "x-goog-request-params", + "sink_name=sink_name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -6301,12 +6938,10 @@ async def test__delete_sink_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.DeleteSinkRequest() - request.sink_name = 'sink_name_value' + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client._delete_sink(request) @@ -6318,9 +6953,9 @@ async def test__delete_sink_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'sink_name=sink_name_value', - ) in kw['metadata'] + "x-goog-request-params", + "sink_name=sink_name_value", + ) in kw["metadata"] def test__delete_sink_flattened(): @@ -6329,15 +6964,13 @@ def test__delete_sink_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client._delete_sink( - sink_name='sink_name_value', + sink_name="sink_name_value", ) # Establish that the underlying call was made with the expected @@ -6345,7 +6978,7 @@ def test__delete_sink_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].sink_name - mock_val = 'sink_name_value' + mock_val = "sink_name_value" assert arg == mock_val @@ -6359,9 +6992,10 @@ def test__delete_sink_flattened_error(): with pytest.raises(ValueError): client._delete_sink( logging_config.DeleteSinkRequest(), - sink_name='sink_name_value', + sink_name="sink_name_value", ) + @pytest.mark.asyncio async def test__delete_sink_flattened_async(): client = BaseConfigServiceV2AsyncClient( @@ -6369,9 +7003,7 @@ async def test__delete_sink_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -6379,7 +7011,7 @@ async def test__delete_sink_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client._delete_sink( - sink_name='sink_name_value', + sink_name="sink_name_value", ) # Establish that the underlying call was made with the expected @@ -6387,9 +7019,10 @@ async def test__delete_sink_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].sink_name - mock_val = 'sink_name_value' + mock_val = "sink_name_value" assert arg == mock_val + @pytest.mark.asyncio async def test__delete_sink_flattened_error_async(): client = BaseConfigServiceV2AsyncClient( @@ -6401,15 +7034,18 @@ async def test__delete_sink_flattened_error_async(): with pytest.raises(ValueError): await client._delete_sink( logging_config.DeleteSinkRequest(), - sink_name='sink_name_value', + sink_name="sink_name_value", ) -@pytest.mark.parametrize("request_type", [ - logging_config.CreateLinkRequest, - dict, -]) -def test__create_link(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.CreateLinkRequest, + dict, + ], +) +def test__create_link(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6420,11 +7056,9 @@ def test__create_link(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_link), - '__call__') as call: + with mock.patch.object(type(client.transport.create_link), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client._create_link(request) # Establish that the underlying gRPC stub method was called. @@ -6442,30 +7076,31 @@ def test__create_link_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. 
client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.CreateLinkRequest( - parent='parent_value', - link_id='link_id_value', + parent="parent_value", + link_id="link_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_link), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.create_link), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._create_link(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateLinkRequest( - parent='parent_value', - link_id='link_id_value', + parent="parent_value", + link_id="link_id_value", ) + def test__create_link_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -6484,7 +7119,9 @@ def test__create_link_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.create_link] = mock_rpc request = {} client._create_link(request) @@ -6503,8 +7140,11 @@ def test__create_link_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__create_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test__create_link_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -6518,12 +7158,17 @@ async def test__create_link_async_use_cached_wrapped_rpc(transport: str = "grpc_ wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.create_link in client._client._transport._wrapped_methods + assert ( + client._client._transport.create_link + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_link] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.create_link + ] = mock_rpc request = {} await client._create_link(request) @@ -6542,8 +7187,11 @@ async def test__create_link_async_use_cached_wrapped_rpc(transport: str = "grpc_ assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__create_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateLinkRequest): +async def test__create_link_async( + transport: str = "grpc_asyncio", request_type=logging_config.CreateLinkRequest +): client = BaseConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -6554,12 +7202,10 @@ async def test__create_link_async(transport: str = 
'grpc_asyncio', request_type= request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_link), - '__call__') as call: + with mock.patch.object(type(client.transport.create_link), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client._create_link(request) @@ -6577,6 +7223,7 @@ async def test__create_link_async(transport: str = 'grpc_asyncio', request_type= async def test__create_link_async_from_dict(): await test__create_link_async(request_type=dict) + def test__create_link_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -6586,13 +7233,11 @@ def test__create_link_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.CreateLinkRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_link), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.create_link), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client._create_link(request) # Establish that the underlying gRPC stub method was called. @@ -6603,9 +7248,9 @@ def test__create_link_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -6618,13 +7263,13 @@ async def test__create_link_field_headers_async(): # a field header. 
Set these to a non-empty value. request = logging_config.CreateLinkRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_link), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + with mock.patch.object(type(client.transport.create_link), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client._create_link(request) # Establish that the underlying gRPC stub method was called. @@ -6635,9 +7280,9 @@ async def test__create_link_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test__create_link_flattened(): @@ -6646,17 +7291,15 @@ def test__create_link_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_link), - '__call__') as call: + with mock.patch.object(type(client.transport.create_link), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client._create_link( - parent='parent_value', - link=logging_config.Link(name='name_value'), - link_id='link_id_value', + parent="parent_value", + link=logging_config.Link(name="name_value"), + link_id="link_id_value", ) # Establish that the underlying call was made with the expected @@ -6664,13 +7307,13 @@ def test__create_link_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].link - mock_val = logging_config.Link(name='name_value') + mock_val = logging_config.Link(name="name_value") assert arg == mock_val arg = args[0].link_id - mock_val = 'link_id_value' + mock_val = "link_id_value" assert arg == mock_val @@ -6684,11 +7327,12 @@ def test__create_link_flattened_error(): with pytest.raises(ValueError): client._create_link( logging_config.CreateLinkRequest(), - parent='parent_value', - link=logging_config.Link(name='name_value'), - link_id='link_id_value', + parent="parent_value", + link=logging_config.Link(name="name_value"), + link_id="link_id_value", ) + @pytest.mark.asyncio async def test__create_link_flattened_async(): client = BaseConfigServiceV2AsyncClient( @@ -6696,21 +7340,19 @@ async def test__create_link_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_link), - '__call__') as call: + with mock.patch.object(type(client.transport.create_link), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client._create_link( - parent='parent_value', - link=logging_config.Link(name='name_value'), - link_id='link_id_value', + parent="parent_value", + link=logging_config.Link(name="name_value"), + link_id="link_id_value", ) # Establish that the underlying call was made with the expected @@ -6718,15 +7360,16 @@ async def test__create_link_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].link - mock_val = logging_config.Link(name='name_value') + mock_val = logging_config.Link(name="name_value") assert arg == mock_val arg = args[0].link_id - mock_val = 'link_id_value' + mock_val = "link_id_value" assert arg == mock_val + @pytest.mark.asyncio async def test__create_link_flattened_error_async(): client = BaseConfigServiceV2AsyncClient( @@ -6738,17 +7381,20 @@ async def test__create_link_flattened_error_async(): with pytest.raises(ValueError): await client._create_link( logging_config.CreateLinkRequest(), - parent='parent_value', - link=logging_config.Link(name='name_value'), - link_id='link_id_value', + parent="parent_value", + link=logging_config.Link(name="name_value"), + link_id="link_id_value", ) -@pytest.mark.parametrize("request_type", [ - logging_config.DeleteLinkRequest, - dict, -]) -def test__delete_link(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.DeleteLinkRequest, + dict, + ], +) +def test__delete_link(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6759,11 +7405,9 @@ def test__delete_link(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_link), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client._delete_link(request) # Establish that the underlying gRPC stub method was called. @@ -6781,28 +7425,29 @@ def test__delete_link_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.DeleteLinkRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_link), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._delete_link(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteLinkRequest( - name='name_value', + name="name_value", ) + def test__delete_link_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -6821,7 +7466,9 @@ def test__delete_link_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.delete_link] = mock_rpc request = {} client._delete_link(request) @@ -6840,8 +7487,11 @@ def test__delete_link_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__delete_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test__delete_link_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -6855,12 +7505,17 @@ async def test__delete_link_async_use_cached_wrapped_rpc(transport: str = "grpc_ wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.delete_link in client._client._transport._wrapped_methods + assert ( + client._client._transport.delete_link + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_link] = mock_rpc + client._client._transport._wrapped_methods[ + 
client._client._transport.delete_link + ] = mock_rpc request = {} await client._delete_link(request) @@ -6879,8 +7534,11 @@ async def test__delete_link_async_use_cached_wrapped_rpc(transport: str = "grpc_ assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__delete_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteLinkRequest): +async def test__delete_link_async( + transport: str = "grpc_asyncio", request_type=logging_config.DeleteLinkRequest +): client = BaseConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -6891,12 +7549,10 @@ async def test__delete_link_async(transport: str = 'grpc_asyncio', request_type= request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_link), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client._delete_link(request) @@ -6914,6 +7570,7 @@ async def test__delete_link_async(transport: str = 'grpc_asyncio', request_type= async def test__delete_link_async_from_dict(): await test__delete_link_async(request_type=dict) + def test__delete_link_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -6923,13 +7580,11 @@ def test__delete_link_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.DeleteLinkRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_link), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client._delete_link(request) # Establish that the underlying gRPC stub method was called. @@ -6940,9 +7595,9 @@ def test__delete_link_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -6955,13 +7610,13 @@ async def test__delete_link_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.DeleteLinkRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_link), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client._delete_link(request) # Establish that the underlying gRPC stub method was called. @@ -6972,9 +7627,9 @@ async def test__delete_link_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test__delete_link_flattened(): @@ -6983,15 +7638,13 @@ def test__delete_link_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_link), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client._delete_link( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -6999,7 +7652,7 @@ def test__delete_link_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -7013,9 +7666,10 @@ def test__delete_link_flattened_error(): with pytest.raises(ValueError): client._delete_link( logging_config.DeleteLinkRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test__delete_link_flattened_async(): client = BaseConfigServiceV2AsyncClient( @@ -7023,19 +7677,17 @@ async def test__delete_link_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_link), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client._delete_link( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -7043,9 +7695,10 @@ async def test__delete_link_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test__delete_link_flattened_error_async(): client = BaseConfigServiceV2AsyncClient( @@ -7057,15 +7710,18 @@ async def test__delete_link_flattened_error_async(): with pytest.raises(ValueError): await client._delete_link( logging_config.DeleteLinkRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - logging_config.ListLinksRequest, - dict, -]) -def test__list_links(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.ListLinksRequest, + dict, + ], +) +def test__list_links(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7076,12 +7732,10 @@ def test__list_links(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_links), - '__call__') as call: + with mock.patch.object(type(client.transport.list_links), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListLinksResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client._list_links(request) @@ -7093,7 +7747,7 @@ def test__list_links(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListLinksPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test__list_links_non_empty_request_with_auto_populated_field(): @@ -7101,30 +7755,31 @@ def test__list_links_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.ListLinksRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_links), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.list_links), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._list_links(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListLinksRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) + def test__list_links_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7143,7 +7798,9 @@ def test__list_links_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.list_links] = mock_rpc request = {} client._list_links(request) @@ -7157,8 +7814,11 @@ def test__list_links_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__list_links_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test__list_links_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -7172,12 +7832,17 @@ async def test__list_links_async_use_cached_wrapped_rpc(transport: str = "grpc_a wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_links in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_links + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_links] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_links + ] = mock_rpc request = {} await client._list_links(request) @@ -7191,8 +7856,11 @@ async def test__list_links_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__list_links_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListLinksRequest): +async def test__list_links_async( + transport: str = "grpc_asyncio", request_type=logging_config.ListLinksRequest +): client = BaseConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), 
transport=transport, @@ -7203,13 +7871,13 @@ async def test__list_links_async(transport: str = 'grpc_asyncio', request_type=l request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_links), - '__call__') as call: + with mock.patch.object(type(client.transport.list_links), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListLinksResponse( + next_page_token="next_page_token_value", + ) + ) response = await client._list_links(request) # Establish that the underlying gRPC stub method was called. @@ -7220,13 +7888,14 @@ async def test__list_links_async(transport: str = 'grpc_asyncio', request_type=l # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLinksAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test__list_links_async_from_dict(): await test__list_links_async(request_type=dict) + def test__list_links_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -7236,12 +7905,10 @@ def test__list_links_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.ListLinksRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_links), - '__call__') as call: + with mock.patch.object(type(client.transport.list_links), "__call__") as call: call.return_value = logging_config.ListLinksResponse() client._list_links(request) @@ -7253,9 +7920,9 @@ def test__list_links_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -7268,13 +7935,13 @@ async def test__list_links_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.ListLinksRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_links), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse()) + with mock.patch.object(type(client.transport.list_links), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListLinksResponse() + ) await client._list_links(request) # Establish that the underlying gRPC stub method was called. @@ -7285,9 +7952,9 @@ async def test__list_links_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test__list_links_flattened(): @@ -7296,15 +7963,13 @@ def test__list_links_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_links), - '__call__') as call: + with mock.patch.object(type(client.transport.list_links), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListLinksResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client._list_links( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -7312,7 +7977,7 @@ def test__list_links_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -7326,9 +7991,10 @@ def test__list_links_flattened_error(): with pytest.raises(ValueError): client._list_links( logging_config.ListLinksRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test__list_links_flattened_async(): client = BaseConfigServiceV2AsyncClient( @@ -7336,17 +8002,17 @@ async def test__list_links_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_links), - '__call__') as call: + with mock.patch.object(type(client.transport.list_links), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListLinksResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListLinksResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client._list_links( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -7354,9 +8020,10 @@ async def test__list_links_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test__list_links_flattened_error_async(): client = BaseConfigServiceV2AsyncClient( @@ -7368,7 +8035,7 @@ async def test__list_links_flattened_error_async(): with pytest.raises(ValueError): await client._list_links( logging_config.ListLinksRequest(), - parent='parent_value', + parent="parent_value", ) @@ -7379,9 +8046,7 @@ def test__list_links_pager(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_links), - '__call__') as call: + with mock.patch.object(type(client.transport.list_links), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListLinksResponse( @@ -7390,17 +8055,17 @@ def test__list_links_pager(transport_name: str = "grpc"): logging_config.Link(), logging_config.Link(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListLinksResponse( links=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListLinksResponse( links=[ logging_config.Link(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListLinksResponse( links=[ @@ -7415,9 +8080,7 @@ def test__list_links_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client._list_links(request={}, retry=retry, timeout=timeout) @@ -7427,8 +8090,9 @@ def test__list_links_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, logging_config.Link) - for i in results) + assert all(isinstance(i, logging_config.Link) for i in results) + + def test__list_links_pages(transport_name: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -7436,9 +8100,7 @@ def test__list_links_pages(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_links), - '__call__') as call: + with mock.patch.object(type(client.transport.list_links), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListLinksResponse( @@ -7447,17 +8109,17 @@ def test__list_links_pages(transport_name: str = "grpc"): logging_config.Link(), logging_config.Link(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListLinksResponse( links=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListLinksResponse( links=[ logging_config.Link(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListLinksResponse( links=[ @@ -7468,9 +8130,10 @@ def test__list_links_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client._list_links(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test__list_links_async_pager(): client = BaseConfigServiceV2AsyncClient( @@ -7479,8 +8142,8 @@ async def test__list_links_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_links), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_links), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListLinksResponse( @@ -7489,17 +8152,17 @@ async def test__list_links_async_pager(): logging_config.Link(), logging_config.Link(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListLinksResponse( links=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListLinksResponse( links=[ logging_config.Link(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListLinksResponse( links=[ @@ -7509,15 +8172,16 @@ async def test__list_links_async_pager(): ), RuntimeError, ) - async_pager = await client._list_links(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client._list_links( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, logging_config.Link) - for i in responses) + assert all(isinstance(i, logging_config.Link) for i in responses) @pytest.mark.asyncio @@ -7528,8 +8192,8 @@ async def test__list_links_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_links), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_links), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListLinksResponse( @@ -7538,17 +8202,17 @@ async def test__list_links_async_pages(): logging_config.Link(), logging_config.Link(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListLinksResponse( links=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListLinksResponse( links=[ logging_config.Link(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListLinksResponse( links=[ @@ -7561,18 +8225,22 @@ async def test__list_links_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client._list_links(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - logging_config.GetLinkRequest, - dict, -]) -def test__get_link(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetLinkRequest, + dict, + ], +) +def test__get_link(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7583,13 +8251,11 @@ def test__get_link(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_link), - '__call__') as call: + with mock.patch.object(type(client.transport.get_link), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = logging_config.Link( - name='name_value', - description='description_value', + name="name_value", + description="description_value", lifecycle_state=logging_config.LifecycleState.ACTIVE, ) response = client._get_link(request) @@ -7602,8 +8268,8 @@ def test__get_link(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.Link) - assert response.name == 'name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.description == "description_value" assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE @@ -7612,28 +8278,29 @@ def test__get_link_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.GetLinkRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_link), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.get_link), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._get_link(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetLinkRequest( - name='name_value', + name="name_value", ) + def test__get_link_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7652,7 +8319,9 @@ def test__get_link_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.get_link] = mock_rpc request = {} client._get_link(request) @@ -7666,6 +8335,7 @@ def test__get_link_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test__get_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7681,12 +8351,17 @@ async def test__get_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asy wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_link in client._client._transport._wrapped_methods + assert ( + client._client._transport.get_link + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_link] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.get_link + ] = mock_rpc request = {} await client._get_link(request) @@ -7700,8 +8375,11 @@ async def test__get_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asy assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + 
@pytest.mark.asyncio -async def test__get_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetLinkRequest): +async def test__get_link_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetLinkRequest +): client = BaseConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -7712,15 +8390,15 @@ async def test__get_link_async(transport: str = 'grpc_asyncio', request_type=log request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_link), - '__call__') as call: + with mock.patch.object(type(client.transport.get_link), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link( - name='name_value', - description='description_value', - lifecycle_state=logging_config.LifecycleState.ACTIVE, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Link( + name="name_value", + description="description_value", + lifecycle_state=logging_config.LifecycleState.ACTIVE, + ) + ) response = await client._get_link(request) # Establish that the underlying gRPC stub method was called. @@ -7731,8 +8409,8 @@ async def test__get_link_async(transport: str = 'grpc_asyncio', request_type=log # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.Link) - assert response.name == 'name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.description == "description_value" assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE @@ -7740,6 +8418,7 @@ async def test__get_link_async(transport: str = 'grpc_asyncio', request_type=log async def test__get_link_async_from_dict(): await test__get_link_async(request_type=dict) + def test__get_link_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -7749,12 +8428,10 @@ def test__get_link_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetLinkRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_link), - '__call__') as call: + with mock.patch.object(type(client.transport.get_link), "__call__") as call: call.return_value = logging_config.Link() client._get_link(request) @@ -7766,9 +8443,9 @@ def test__get_link_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -7781,12 +8458,10 @@ async def test__get_link_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetLinkRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_link), - '__call__') as call: + with mock.patch.object(type(client.transport.get_link), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link()) await client._get_link(request) @@ -7798,9 +8473,9 @@ async def test__get_link_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test__get_link_flattened(): @@ -7809,15 +8484,13 @@ def test__get_link_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_link), - '__call__') as call: + with mock.patch.object(type(client.transport.get_link), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.Link() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client._get_link( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -7825,7 +8498,7 @@ def test__get_link_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -7839,9 +8512,10 @@ def test__get_link_flattened_error(): with pytest.raises(ValueError): client._get_link( logging_config.GetLinkRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test__get_link_flattened_async(): client = BaseConfigServiceV2AsyncClient( @@ -7849,9 +8523,7 @@ async def test__get_link_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_link), - '__call__') as call: + with mock.patch.object(type(client.transport.get_link), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.Link() @@ -7859,7 +8531,7 @@ async def test__get_link_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client._get_link( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -7867,9 +8539,10 @@ async def test__get_link_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test__get_link_flattened_error_async(): client = BaseConfigServiceV2AsyncClient( @@ -7881,15 +8554,18 @@ async def test__get_link_flattened_error_async(): with pytest.raises(ValueError): await client._get_link( logging_config.GetLinkRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - logging_config.ListExclusionsRequest, - dict, -]) -def test__list_exclusions(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.ListExclusionsRequest, + dict, + ], +) +def test__list_exclusions(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7900,12 +8576,10 @@ def test__list_exclusions(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = logging_config.ListExclusionsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client._list_exclusions(request) @@ -7917,7 +8591,7 @@ def test__list_exclusions(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListExclusionsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test__list_exclusions_non_empty_request_with_auto_populated_field(): @@ -7925,30 +8599,31 @@ def test__list_exclusions_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.ListExclusionsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._list_exclusions(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListExclusionsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) + def test__list_exclusions_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7967,7 +8642,9 @@ def test__list_exclusions_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.list_exclusions] = mock_rpc request = {} client._list_exclusions(request) @@ -7981,8 +8658,11 @@ def test__list_exclusions_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__list_exclusions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test__list_exclusions_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -7996,12 +8676,17 @@ async def test__list_exclusions_async_use_cached_wrapped_rpc(transport: str = "g wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_exclusions in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_exclusions + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - 
client._client._transport._wrapped_methods[client._client._transport.list_exclusions] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_exclusions + ] = mock_rpc request = {} await client._list_exclusions(request) @@ -8015,8 +8700,11 @@ async def test__list_exclusions_async_use_cached_wrapped_rpc(transport: str = "g assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__list_exclusions_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListExclusionsRequest): +async def test__list_exclusions_async( + transport: str = "grpc_asyncio", request_type=logging_config.ListExclusionsRequest +): client = BaseConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -8027,13 +8715,13 @@ async def test__list_exclusions_async(transport: str = 'grpc_asyncio', request_t request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListExclusionsResponse( + next_page_token="next_page_token_value", + ) + ) response = await client._list_exclusions(request) # Establish that the underlying gRPC stub method was called. @@ -8044,13 +8732,14 @@ async def test__list_exclusions_async(transport: str = 'grpc_asyncio', request_t # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListExclusionsAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test__list_exclusions_async_from_dict(): await test__list_exclusions_async(request_type=dict) + def test__list_exclusions_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -8060,12 +8749,10 @@ def test__list_exclusions_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.ListExclusionsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: call.return_value = logging_config.ListExclusionsResponse() client._list_exclusions(request) @@ -8077,9 +8764,9 @@ def test__list_exclusions_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -8092,13 +8779,13 @@ async def test__list_exclusions_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.ListExclusionsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse()) + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListExclusionsResponse() + ) await client._list_exclusions(request) # Establish that the underlying gRPC stub method was called. @@ -8109,9 +8796,9 @@ async def test__list_exclusions_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test__list_exclusions_flattened(): @@ -8120,15 +8807,13 @@ def test__list_exclusions_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListExclusionsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client._list_exclusions( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -8136,7 +8821,7 @@ def test__list_exclusions_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -8150,9 +8835,10 @@ def test__list_exclusions_flattened_error(): with pytest.raises(ValueError): client._list_exclusions( logging_config.ListExclusionsRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test__list_exclusions_flattened_async(): client = BaseConfigServiceV2AsyncClient( @@ -8160,17 +8846,17 @@ async def test__list_exclusions_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListExclusionsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListExclusionsResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client._list_exclusions( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -8178,9 +8864,10 @@ async def test__list_exclusions_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test__list_exclusions_flattened_error_async(): client = BaseConfigServiceV2AsyncClient( @@ -8192,7 +8879,7 @@ async def test__list_exclusions_flattened_error_async(): with pytest.raises(ValueError): await client._list_exclusions( logging_config.ListExclusionsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -8203,9 +8890,7 @@ def test__list_exclusions_pager(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListExclusionsResponse( @@ -8214,17 +8899,17 @@ def test__list_exclusions_pager(transport_name: str = "grpc"): logging_config.LogExclusion(), logging_config.LogExclusion(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListExclusionsResponse( exclusions=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListExclusionsResponse( exclusions=[ logging_config.LogExclusion(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListExclusionsResponse( exclusions=[ @@ -8239,9 +8924,7 @@ def test__list_exclusions_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client._list_exclusions(request={}, retry=retry, timeout=timeout) @@ -8251,8 +8934,9 @@ def test__list_exclusions_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, logging_config.LogExclusion) - for i in results) + assert all(isinstance(i, logging_config.LogExclusion) for i in results) + + def test__list_exclusions_pages(transport_name: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -8260,9 +8944,7 @@ def test__list_exclusions_pages(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListExclusionsResponse( @@ -8271,17 +8953,17 @@ def test__list_exclusions_pages(transport_name: str = "grpc"): logging_config.LogExclusion(), logging_config.LogExclusion(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListExclusionsResponse( exclusions=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListExclusionsResponse( exclusions=[ logging_config.LogExclusion(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListExclusionsResponse( exclusions=[ @@ -8292,9 +8974,10 @@ def test__list_exclusions_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client._list_exclusions(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test__list_exclusions_async_pager(): client = BaseConfigServiceV2AsyncClient( @@ -8303,8 +8986,8 @@ async def test__list_exclusions_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_exclusions), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_exclusions), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListExclusionsResponse( @@ -8313,17 +8996,17 @@ async def test__list_exclusions_async_pager(): logging_config.LogExclusion(), logging_config.LogExclusion(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListExclusionsResponse( exclusions=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListExclusionsResponse( exclusions=[ logging_config.LogExclusion(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListExclusionsResponse( exclusions=[ @@ -8333,15 +9016,16 @@ async def test__list_exclusions_async_pager(): ), RuntimeError, ) - async_pager = await client._list_exclusions(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client._list_exclusions( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, logging_config.LogExclusion) - for i in responses) + assert all(isinstance(i, logging_config.LogExclusion) for i in responses) @pytest.mark.asyncio @@ -8352,8 +9036,8 @@ async def test__list_exclusions_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_exclusions), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_exclusions), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging_config.ListExclusionsResponse( @@ -8362,17 +9046,17 @@ async def test__list_exclusions_async_pages(): logging_config.LogExclusion(), logging_config.LogExclusion(), ], - next_page_token='abc', + next_page_token="abc", ), logging_config.ListExclusionsResponse( exclusions=[], - next_page_token='def', + next_page_token="def", ), logging_config.ListExclusionsResponse( exclusions=[ logging_config.LogExclusion(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_config.ListExclusionsResponse( exclusions=[ @@ -8385,18 +9069,22 @@ async def test__list_exclusions_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client._list_exclusions(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - logging_config.GetExclusionRequest, - dict, -]) -def test__get_exclusion(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetExclusionRequest, + dict, + ], +) +def test__get_exclusion(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8407,14 +9095,12 @@ def test__get_exclusion(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", disabled=True, ) response = client._get_exclusion(request) @@ -8427,9 +9113,9 @@ def test__get_exclusion(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" assert response.disabled is True @@ -8438,28 +9124,29 @@ def test__get_exclusion_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.GetExclusionRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_exclusion), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._get_exclusion(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetExclusionRequest( - name='name_value', + name="name_value", ) + def test__get_exclusion_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -8478,7 +9165,9 @@ def test__get_exclusion_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.get_exclusion] = mock_rpc request = {} client._get_exclusion(request) @@ -8492,8 +9181,11 @@ def test__get_exclusion_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__get_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test__get_exclusion_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -8507,12 +9199,17 @@ async def test__get_exclusion_async_use_cached_wrapped_rpc(transport: str = "grp wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_exclusion in client._client._transport._wrapped_methods + assert ( + client._client._transport.get_exclusion + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_exclusion] = mock_rpc + 
client._client._transport._wrapped_methods[ + client._client._transport.get_exclusion + ] = mock_rpc request = {} await client._get_exclusion(request) @@ -8526,8 +9223,11 @@ async def test__get_exclusion_async_use_cached_wrapped_rpc(transport: str = "grp assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__get_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetExclusionRequest): +async def test__get_exclusion_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetExclusionRequest +): client = BaseConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -8538,16 +9238,16 @@ async def test__get_exclusion_async(transport: str = 'grpc_asyncio', request_typ request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + ) response = await client._get_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -8558,9 +9258,9 @@ async def test__get_exclusion_async(transport: str = 'grpc_asyncio', request_typ # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" assert response.disabled is True @@ -8568,6 +9268,7 @@ async def test__get_exclusion_async(transport: str = 'grpc_asyncio', request_typ async def test__get_exclusion_async_from_dict(): await test__get_exclusion_async(request_type=dict) + def test__get_exclusion_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -8577,12 +9278,10 @@ def test__get_exclusion_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetExclusionRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: call.return_value = logging_config.LogExclusion() client._get_exclusion(request) @@ -8594,9 +9293,9 @@ def test__get_exclusion_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -8609,13 +9308,13 @@ async def test__get_exclusion_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetExclusionRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_exclusion), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion() + ) await client._get_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -8626,9 +9325,9 @@ async def test__get_exclusion_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test__get_exclusion_flattened(): @@ -8637,15 +9336,13 @@ def test__get_exclusion_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client._get_exclusion( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -8653,7 +9350,7 @@ def test__get_exclusion_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -8667,9 +9364,10 @@ def test__get_exclusion_flattened_error(): with pytest.raises(ValueError): client._get_exclusion( logging_config.GetExclusionRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test__get_exclusion_flattened_async(): client = BaseConfigServiceV2AsyncClient( @@ -8677,17 +9375,17 @@ async def test__get_exclusion_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client._get_exclusion( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -8695,9 +9393,10 @@ async def test__get_exclusion_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test__get_exclusion_flattened_error_async(): client = BaseConfigServiceV2AsyncClient( @@ -8709,15 +9408,18 @@ async def test__get_exclusion_flattened_error_async(): with pytest.raises(ValueError): await client._get_exclusion( logging_config.GetExclusionRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - logging_config.CreateExclusionRequest, - dict, -]) -def test__create_exclusion(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.CreateExclusionRequest, + dict, + ], +) +def test__create_exclusion(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8728,14 +9430,12 @@ def test__create_exclusion(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", disabled=True, ) response = client._create_exclusion(request) @@ -8748,9 +9448,9 @@ def test__create_exclusion(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" assert response.disabled is True @@ -8759,28 +9459,29 @@ def test__create_exclusion_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.CreateExclusionRequest( - parent='parent_value', + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_exclusion), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._create_exclusion(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateExclusionRequest( - parent='parent_value', + parent="parent_value", ) + def test__create_exclusion_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -8799,8 +9500,12 @@ def test__create_exclusion_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_exclusion] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_exclusion] = ( + mock_rpc + ) request = {} client._create_exclusion(request) @@ -8813,8 +9518,11 @@ def test__create_exclusion_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__create_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test__create_exclusion_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -8828,12 +9536,17 @@ async def test__create_exclusion_async_use_cached_wrapped_rpc(transport: str = " wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.create_exclusion in client._client._transport._wrapped_methods + assert ( + client._client._transport.create_exclusion + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = 
mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_exclusion] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.create_exclusion + ] = mock_rpc request = {} await client._create_exclusion(request) @@ -8847,8 +9560,11 @@ async def test__create_exclusion_async_use_cached_wrapped_rpc(transport: str = " assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__create_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateExclusionRequest): +async def test__create_exclusion_async( + transport: str = "grpc_asyncio", request_type=logging_config.CreateExclusionRequest +): client = BaseConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -8859,16 +9575,16 @@ async def test__create_exclusion_async(transport: str = 'grpc_asyncio', request_ request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + ) response = await client._create_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -8879,9 +9595,9 @@ async def test__create_exclusion_async(transport: str = 'grpc_asyncio', request_ # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" assert response.disabled is True @@ -8889,6 +9605,7 @@ async def test__create_exclusion_async(transport: str = 'grpc_asyncio', request_ async def test__create_exclusion_async_from_dict(): await test__create_exclusion_async(request_type=dict) + def test__create_exclusion_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -8898,12 +9615,10 @@ def test__create_exclusion_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.CreateExclusionRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: call.return_value = logging_config.LogExclusion() client._create_exclusion(request) @@ -8915,9 +9630,9 @@ def test__create_exclusion_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -8930,13 +9645,13 @@ async def test__create_exclusion_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.CreateExclusionRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_exclusion), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion() + ) await client._create_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -8947,9 +9662,9 @@ async def test__create_exclusion_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test__create_exclusion_flattened(): @@ -8958,16 +9673,14 @@ def test__create_exclusion_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client._create_exclusion( - parent='parent_value', - exclusion=logging_config.LogExclusion(name='name_value'), + parent="parent_value", + exclusion=logging_config.LogExclusion(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -8975,10 +9688,10 @@ def test__create_exclusion_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].exclusion - mock_val = logging_config.LogExclusion(name='name_value') + mock_val = logging_config.LogExclusion(name="name_value") assert arg == mock_val @@ -8992,10 +9705,11 @@ def test__create_exclusion_flattened_error(): with pytest.raises(ValueError): client._create_exclusion( logging_config.CreateExclusionRequest(), - parent='parent_value', - exclusion=logging_config.LogExclusion(name='name_value'), + parent="parent_value", + exclusion=logging_config.LogExclusion(name="name_value"), ) + @pytest.mark.asyncio async def test__create_exclusion_flattened_async(): client = BaseConfigServiceV2AsyncClient( @@ -9003,18 +9717,18 @@ async def test__create_exclusion_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client._create_exclusion( - parent='parent_value', - exclusion=logging_config.LogExclusion(name='name_value'), + parent="parent_value", + exclusion=logging_config.LogExclusion(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -9022,12 +9736,13 @@ async def test__create_exclusion_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].exclusion - mock_val = logging_config.LogExclusion(name='name_value') + mock_val = logging_config.LogExclusion(name="name_value") assert arg == mock_val + @pytest.mark.asyncio async def test__create_exclusion_flattened_error_async(): client = BaseConfigServiceV2AsyncClient( @@ -9039,16 +9754,19 @@ async def test__create_exclusion_flattened_error_async(): with pytest.raises(ValueError): await client._create_exclusion( logging_config.CreateExclusionRequest(), - parent='parent_value', - exclusion=logging_config.LogExclusion(name='name_value'), + parent="parent_value", + exclusion=logging_config.LogExclusion(name="name_value"), ) -@pytest.mark.parametrize("request_type", [ - logging_config.UpdateExclusionRequest, - dict, -]) -def test__update_exclusion(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateExclusionRequest, + dict, + ], +) +def test__update_exclusion(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9059,14 +9777,12 @@ def test__update_exclusion(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", disabled=True, ) response = client._update_exclusion(request) @@ -9079,9 +9795,9 @@ def test__update_exclusion(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" assert response.disabled is True @@ -9090,28 +9806,29 @@ def test__update_exclusion_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.UpdateExclusionRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_exclusion), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._update_exclusion(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateExclusionRequest( - name='name_value', + name="name_value", ) + def test__update_exclusion_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -9130,8 +9847,12 @@ def test__update_exclusion_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_exclusion] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.update_exclusion] = ( + mock_rpc + ) request = {} client._update_exclusion(request) @@ -9144,8 +9865,11 @@ def test__update_exclusion_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__update_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test__update_exclusion_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -9159,12 +9883,17 @@ async def test__update_exclusion_async_use_cached_wrapped_rpc(transport: str = " wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.update_exclusion in client._client._transport._wrapped_methods + assert ( + client._client._transport.update_exclusion + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_exclusion] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.update_exclusion + ] = mock_rpc request = {} await client._update_exclusion(request) @@ -9178,8 +9907,11 @@ async def test__update_exclusion_async_use_cached_wrapped_rpc(transport: str = " assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__update_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateExclusionRequest): +async def test__update_exclusion_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateExclusionRequest +): client = BaseConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, 
@@ -9190,16 +9922,16 @@ async def test__update_exclusion_async(transport: str = 'grpc_asyncio', request_ request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + ) response = await client._update_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -9210,9 +9942,9 @@ async def test__update_exclusion_async(transport: str = 'grpc_asyncio', request_ # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" assert response.disabled is True @@ -9220,6 +9952,7 @@ async def test__update_exclusion_async(transport: str = 'grpc_asyncio', request_ async def test__update_exclusion_async_from_dict(): await test__update_exclusion_async(request_type=dict) + def test__update_exclusion_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -9229,12 +9962,10 @@ def test__update_exclusion_field_headers(): # a field header. Set these to a non-empty value. 
request = logging_config.UpdateExclusionRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: call.return_value = logging_config.LogExclusion() client._update_exclusion(request) @@ -9246,9 +9977,9 @@ def test__update_exclusion_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -9261,13 +9992,13 @@ async def test__update_exclusion_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateExclusionRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_exclusion), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion() + ) await client._update_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -9278,9 +10009,9 @@ async def test__update_exclusion_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test__update_exclusion_flattened(): @@ -9289,17 +10020,15 @@ def test__update_exclusion_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client._update_exclusion( - name='name_value', - exclusion=logging_config.LogExclusion(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + name="name_value", + exclusion=logging_config.LogExclusion(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -9307,13 +10036,13 @@ def test__update_exclusion_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].exclusion - mock_val = logging_config.LogExclusion(name='name_value') + mock_val = logging_config.LogExclusion(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @@ -9327,11 +10056,12 @@ def test__update_exclusion_flattened_error(): with pytest.raises(ValueError): client._update_exclusion( logging_config.UpdateExclusionRequest(), - name='name_value', - exclusion=logging_config.LogExclusion(name='name_value'), - 
update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + name="name_value", + exclusion=logging_config.LogExclusion(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + @pytest.mark.asyncio async def test__update_exclusion_flattened_async(): client = BaseConfigServiceV2AsyncClient( @@ -9339,19 +10069,19 @@ async def test__update_exclusion_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client._update_exclusion( - name='name_value', - exclusion=logging_config.LogExclusion(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + name="name_value", + exclusion=logging_config.LogExclusion(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -9359,15 +10089,16 @@ async def test__update_exclusion_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].exclusion - mock_val = logging_config.LogExclusion(name='name_value') + mock_val = logging_config.LogExclusion(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val + @pytest.mark.asyncio async def test__update_exclusion_flattened_error_async(): client = BaseConfigServiceV2AsyncClient( @@ -9379,17 +10110,20 @@ async def test__update_exclusion_flattened_error_async(): with pytest.raises(ValueError): await client._update_exclusion( logging_config.UpdateExclusionRequest(), - name='name_value', - exclusion=logging_config.LogExclusion(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + name="name_value", + exclusion=logging_config.LogExclusion(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -@pytest.mark.parametrize("request_type", [ - logging_config.DeleteExclusionRequest, - dict, -]) -def test__delete_exclusion(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.DeleteExclusionRequest, + dict, + ], +) +def test__delete_exclusion(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), 
transport=transport, @@ -9400,9 +10134,7 @@ def test__delete_exclusion(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None response = client._delete_exclusion(request) @@ -9422,28 +10154,29 @@ def test__delete_exclusion_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.DeleteExclusionRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_exclusion), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._delete_exclusion(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteExclusionRequest( - name='name_value', + name="name_value", ) + def test__delete_exclusion_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -9462,8 +10195,12 @@ def test__delete_exclusion_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_exclusion] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_exclusion] = ( + mock_rpc + ) request = {} client._delete_exclusion(request) @@ -9476,8 +10213,11 @@ def test__delete_exclusion_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__delete_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test__delete_exclusion_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -9491,12 +10231,17 @@ async def test__delete_exclusion_async_use_cached_wrapped_rpc(transport: str = " wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.delete_exclusion in client._client._transport._wrapped_methods + assert ( + client._client._transport.delete_exclusion + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = 
mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_exclusion] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.delete_exclusion + ] = mock_rpc request = {} await client._delete_exclusion(request) @@ -9510,8 +10255,11 @@ async def test__delete_exclusion_async_use_cached_wrapped_rpc(transport: str = " assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__delete_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteExclusionRequest): +async def test__delete_exclusion_async( + transport: str = "grpc_asyncio", request_type=logging_config.DeleteExclusionRequest +): client = BaseConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -9522,9 +10270,7 @@ async def test__delete_exclusion_async(transport: str = 'grpc_asyncio', request_ request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client._delete_exclusion(request) @@ -9543,6 +10289,7 @@ async def test__delete_exclusion_async(transport: str = 'grpc_asyncio', request_ async def test__delete_exclusion_async_from_dict(): await test__delete_exclusion_async(request_type=dict) + def test__delete_exclusion_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -9552,12 +10299,10 @@ def test__delete_exclusion_field_headers(): # a field header. Set these to a non-empty value. 
request = logging_config.DeleteExclusionRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: call.return_value = None client._delete_exclusion(request) @@ -9569,9 +10314,9 @@ def test__delete_exclusion_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -9584,12 +10329,10 @@ async def test__delete_exclusion_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.DeleteExclusionRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client._delete_exclusion(request) @@ -9601,9 +10344,9 @@ async def test__delete_exclusion_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test__delete_exclusion_flattened(): @@ -9612,15 +10355,13 @@ def test__delete_exclusion_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client._delete_exclusion( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -9628,7 +10369,7 @@ def test__delete_exclusion_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -9642,9 +10383,10 @@ def test__delete_exclusion_flattened_error(): with pytest.raises(ValueError): client._delete_exclusion( logging_config.DeleteExclusionRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test__delete_exclusion_flattened_async(): client = BaseConfigServiceV2AsyncClient( @@ -9652,9 +10394,7 @@ async def test__delete_exclusion_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -9662,7 +10402,7 @@ async def test__delete_exclusion_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client._delete_exclusion( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -9670,9 +10410,10 @@ async def test__delete_exclusion_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test__delete_exclusion_flattened_error_async(): client = BaseConfigServiceV2AsyncClient( @@ -9684,15 +10425,18 @@ async def test__delete_exclusion_flattened_error_async(): with pytest.raises(ValueError): await client._delete_exclusion( logging_config.DeleteExclusionRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - logging_config.GetCmekSettingsRequest, - dict, -]) -def test__get_cmek_settings(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetCmekSettingsRequest, + dict, + ], +) +def test__get_cmek_settings(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9704,14 +10448,14 @@ def test__get_cmek_settings(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_cmek_settings), - '__call__') as call: + type(client.transport.get_cmek_settings), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = logging_config.CmekSettings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_key_version_name='kms_key_version_name_value', - service_account_id='service_account_id_value', + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", ) response = client._get_cmek_settings(request) @@ -9723,10 +10467,10 @@ def test__get_cmek_settings(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.CmekSettings) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.kms_key_version_name == 'kms_key_version_name_value' - assert response.service_account_id == 'service_account_id_value' + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_key_version_name == "kms_key_version_name_value" + assert response.service_account_id == "service_account_id_value" def test__get_cmek_settings_non_empty_request_with_auto_populated_field(): @@ -9734,28 +10478,31 @@ def test__get_cmek_settings_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.GetCmekSettingsRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_cmek_settings), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ type(client.transport.get_cmek_settings), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._get_cmek_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetCmekSettingsRequest( - name='name_value', + name="name_value", ) + def test__get_cmek_settings_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -9774,8 +10521,12 @@ def test__get_cmek_settings_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_cmek_settings] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_cmek_settings] = ( + mock_rpc + ) request = {} client._get_cmek_settings(request) @@ -9788,8 +10539,11 @@ def test__get_cmek_settings_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__get_cmek_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test__get_cmek_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -9803,12 +10557,17 @@ async def test__get_cmek_settings_async_use_cached_wrapped_rpc(transport: str = wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_cmek_settings in client._client._transport._wrapped_methods + assert ( + 
client._client._transport.get_cmek_settings + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_cmek_settings] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.get_cmek_settings + ] = mock_rpc request = {} await client._get_cmek_settings(request) @@ -9822,8 +10581,11 @@ async def test__get_cmek_settings_async_use_cached_wrapped_rpc(transport: str = assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__get_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetCmekSettingsRequest): +async def test__get_cmek_settings_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetCmekSettingsRequest +): client = BaseConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -9835,15 +10597,17 @@ async def test__get_cmek_settings_async(transport: str = 'grpc_asyncio', request # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_cmek_settings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_key_version_name='kms_key_version_name_value', - service_account_id='service_account_id_value', - )) + type(client.transport.get_cmek_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.CmekSettings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", + ) + ) response = await client._get_cmek_settings(request) # Establish that the underlying gRPC stub method was called. @@ -9854,16 +10618,17 @@ async def test__get_cmek_settings_async(transport: str = 'grpc_asyncio', request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.CmekSettings) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.kms_key_version_name == 'kms_key_version_name_value' - assert response.service_account_id == 'service_account_id_value' + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_key_version_name == "kms_key_version_name_value" + assert response.service_account_id == "service_account_id_value" @pytest.mark.asyncio async def test__get_cmek_settings_async_from_dict(): await test__get_cmek_settings_async(request_type=dict) + def test__get_cmek_settings_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -9873,12 +10638,12 @@ def test__get_cmek_settings_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetCmekSettingsRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_cmek_settings), - '__call__') as call: + type(client.transport.get_cmek_settings), "__call__" + ) as call: call.return_value = logging_config.CmekSettings() client._get_cmek_settings(request) @@ -9890,9 +10655,9 @@ def test__get_cmek_settings_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -9905,13 +10670,15 @@ async def test__get_cmek_settings_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetCmekSettingsRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_cmek_settings), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings()) + type(client.transport.get_cmek_settings), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.CmekSettings() + ) await client._get_cmek_settings(request) # Establish that the underlying gRPC stub method was called. @@ -9922,16 +10689,19 @@ async def test__get_cmek_settings_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - logging_config.UpdateCmekSettingsRequest, - dict, -]) -def test__update_cmek_settings(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateCmekSettingsRequest, + dict, + ], +) +def test__update_cmek_settings(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9943,14 +10713,14 @@ def test__update_cmek_settings(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_cmek_settings), - '__call__') as call: + type(client.transport.update_cmek_settings), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = logging_config.CmekSettings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_key_version_name='kms_key_version_name_value', - service_account_id='service_account_id_value', + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", ) response = client._update_cmek_settings(request) @@ -9962,10 +10732,10 @@ def test__update_cmek_settings(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.CmekSettings) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.kms_key_version_name == 'kms_key_version_name_value' - assert response.service_account_id == 'service_account_id_value' + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_key_version_name == "kms_key_version_name_value" + assert response.service_account_id == "service_account_id_value" def test__update_cmek_settings_non_empty_request_with_auto_populated_field(): @@ -9973,28 +10743,31 @@ def test__update_cmek_settings_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
request = logging_config.UpdateCmekSettingsRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_cmek_settings), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.update_cmek_settings), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._update_cmek_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateCmekSettingsRequest( - name='name_value', + name="name_value", ) + def test__update_cmek_settings_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -10009,12 +10782,18 @@ def test__update_cmek_settings_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_cmek_settings in client._transport._wrapped_methods + assert ( + client._transport.update_cmek_settings in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_cmek_settings] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.update_cmek_settings] = ( + mock_rpc + ) request = {} client._update_cmek_settings(request) @@ -10027,8 +10806,11 @@ def test__update_cmek_settings_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__update_cmek_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test__update_cmek_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -10042,12 +10824,17 @@ async def test__update_cmek_settings_async_use_cached_wrapped_rpc(transport: str wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.update_cmek_settings in client._client._transport._wrapped_methods + assert ( + client._client._transport.update_cmek_settings + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_cmek_settings] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.update_cmek_settings + ] = mock_rpc request = {} await client._update_cmek_settings(request) @@ -10061,8 +10848,12 @@ async def test__update_cmek_settings_async_use_cached_wrapped_rpc(transport: str assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__update_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateCmekSettingsRequest): +async def test__update_cmek_settings_async( + transport: str = "grpc_asyncio", + request_type=logging_config.UpdateCmekSettingsRequest, +): client = BaseConfigServiceV2AsyncClient( 
credentials=async_anonymous_credentials(), transport=transport, @@ -10074,15 +10865,17 @@ async def test__update_cmek_settings_async(transport: str = 'grpc_asyncio', requ # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_cmek_settings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_key_version_name='kms_key_version_name_value', - service_account_id='service_account_id_value', - )) + type(client.transport.update_cmek_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.CmekSettings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", + ) + ) response = await client._update_cmek_settings(request) # Establish that the underlying gRPC stub method was called. @@ -10093,16 +10886,17 @@ async def test__update_cmek_settings_async(transport: str = 'grpc_asyncio', requ # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.CmekSettings) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.kms_key_version_name == 'kms_key_version_name_value' - assert response.service_account_id == 'service_account_id_value' + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_key_version_name == "kms_key_version_name_value" + assert response.service_account_id == "service_account_id_value" @pytest.mark.asyncio async def test__update_cmek_settings_async_from_dict(): await test__update_cmek_settings_async(request_type=dict) + def test__update_cmek_settings_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -10112,12 +10906,12 @@ def test__update_cmek_settings_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateCmekSettingsRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_cmek_settings), - '__call__') as call: + type(client.transport.update_cmek_settings), "__call__" + ) as call: call.return_value = logging_config.CmekSettings() client._update_cmek_settings(request) @@ -10129,9 +10923,9 @@ def test__update_cmek_settings_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -10144,13 +10938,15 @@ async def test__update_cmek_settings_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateCmekSettingsRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_cmek_settings), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings()) + type(client.transport.update_cmek_settings), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.CmekSettings() + ) await client._update_cmek_settings(request) # Establish that the underlying gRPC stub method was called. @@ -10161,16 +10957,19 @@ async def test__update_cmek_settings_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [ - logging_config.GetSettingsRequest, - dict, -]) -def test__get_settings(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetSettingsRequest, + dict, + ], +) +def test__get_settings(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10181,15 +10980,13 @@ def test__get_settings(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_settings), - '__call__') as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = logging_config.Settings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_service_account_id='kms_service_account_id_value', - storage_location='storage_location_value', + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", disable_default_sink=True, ) response = client._get_settings(request) @@ -10202,10 +10999,10 @@ def test__get_settings(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.Settings) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.kms_service_account_id == 'kms_service_account_id_value' - assert response.storage_location == 'storage_location_value' + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_service_account_id == "kms_service_account_id_value" + assert response.storage_location == "storage_location_value" assert response.disable_default_sink is True @@ -10214,28 +11011,29 @@ def test__get_settings_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.GetSettingsRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_settings), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._get_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetSettingsRequest( - name='name_value', + name="name_value", ) + def test__get_settings_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -10254,7 +11052,9 @@ def test__get_settings_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.get_settings] = mock_rpc request = {} client._get_settings(request) @@ -10268,8 +11068,11 @@ def test__get_settings_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__get_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test__get_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -10283,12 +11086,17 @@ async def test__get_settings_async_use_cached_wrapped_rpc(transport: str = "grpc wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_settings in client._client._transport._wrapped_methods + assert ( + client._client._transport.get_settings + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock 
mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_settings] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.get_settings + ] = mock_rpc request = {} await client._get_settings(request) @@ -10302,8 +11110,11 @@ async def test__get_settings_async_use_cached_wrapped_rpc(transport: str = "grpc assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__get_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetSettingsRequest): +async def test__get_settings_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetSettingsRequest +): client = BaseConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -10314,17 +11125,17 @@ async def test__get_settings_async(transport: str = 'grpc_asyncio', request_type request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_settings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_service_account_id='kms_service_account_id_value', - storage_location='storage_location_value', - disable_default_sink=True, - )) + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", + disable_default_sink=True, + ) + ) response = await client._get_settings(request) # Establish that the underlying gRPC stub method was called. @@ -10335,10 +11146,10 @@ async def test__get_settings_async(transport: str = 'grpc_asyncio', request_type # Establish that the response is the type that we expect. assert isinstance(response, logging_config.Settings) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.kms_service_account_id == 'kms_service_account_id_value' - assert response.storage_location == 'storage_location_value' + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_service_account_id == "kms_service_account_id_value" + assert response.storage_location == "storage_location_value" assert response.disable_default_sink is True @@ -10346,6 +11157,7 @@ async def test__get_settings_async(transport: str = 'grpc_asyncio', request_type async def test__get_settings_async_from_dict(): await test__get_settings_async(request_type=dict) + def test__get_settings_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -10355,12 +11167,10 @@ def test__get_settings_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetSettingsRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_settings), - '__call__') as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: call.return_value = logging_config.Settings() client._get_settings(request) @@ -10372,9 +11182,9 @@ def test__get_settings_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -10387,13 +11197,13 @@ async def test__get_settings_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetSettingsRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_settings), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings()) + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings() + ) await client._get_settings(request) # Establish that the underlying gRPC stub method was called. @@ -10404,9 +11214,9 @@ async def test__get_settings_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test__get_settings_flattened(): @@ -10415,15 +11225,13 @@ def test__get_settings_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_settings), - '__call__') as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = logging_config.Settings() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client._get_settings( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -10431,7 +11239,7 @@ def test__get_settings_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -10445,9 +11253,10 @@ def test__get_settings_flattened_error(): with pytest.raises(ValueError): client._get_settings( logging_config.GetSettingsRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test__get_settings_flattened_async(): client = BaseConfigServiceV2AsyncClient( @@ -10455,17 +11264,17 @@ async def test__get_settings_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_settings), - '__call__') as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.Settings() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client._get_settings( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -10473,9 +11282,10 @@ async def test__get_settings_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test__get_settings_flattened_error_async(): client = BaseConfigServiceV2AsyncClient( @@ -10487,15 +11297,18 @@ async def test__get_settings_flattened_error_async(): with pytest.raises(ValueError): await client._get_settings( logging_config.GetSettingsRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - logging_config.UpdateSettingsRequest, - dict, -]) -def test__update_settings(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateSettingsRequest, + dict, + ], +) +def test__update_settings(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10506,15 +11319,13 @@ def test__update_settings(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_settings), - '__call__') as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = logging_config.Settings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_service_account_id='kms_service_account_id_value', - storage_location='storage_location_value', + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", disable_default_sink=True, ) response = client._update_settings(request) @@ -10527,10 +11338,10 @@ def test__update_settings(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.Settings) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.kms_service_account_id == 'kms_service_account_id_value' - assert response.storage_location == 'storage_location_value' + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_service_account_id == "kms_service_account_id_value" + assert response.storage_location == "storage_location_value" assert response.disable_default_sink is True @@ -10539,28 +11350,29 @@ def test__update_settings_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.UpdateSettingsRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_settings), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._update_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateSettingsRequest( - name='name_value', + name="name_value", ) + def test__update_settings_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -10579,7 +11391,9 @@ def test__update_settings_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.update_settings] = mock_rpc request = {} client._update_settings(request) @@ -10593,8 +11407,11 @@ def test__update_settings_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__update_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test__update_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -10608,12 +11425,17 @@ async def test__update_settings_async_use_cached_wrapped_rpc(transport: str = "g wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.update_settings in client._client._transport._wrapped_methods + assert ( + client._client._transport.update_settings + in client._client._transport._wrapped_methods + ) # Replace 
cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_settings] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.update_settings + ] = mock_rpc request = {} await client._update_settings(request) @@ -10627,8 +11449,11 @@ async def test__update_settings_async_use_cached_wrapped_rpc(transport: str = "g assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__update_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateSettingsRequest): +async def test__update_settings_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateSettingsRequest +): client = BaseConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -10639,17 +11464,17 @@ async def test__update_settings_async(transport: str = 'grpc_asyncio', request_t request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_settings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_service_account_id='kms_service_account_id_value', - storage_location='storage_location_value', - disable_default_sink=True, - )) + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", + disable_default_sink=True, + ) + ) response = await client._update_settings(request) # Establish that the underlying gRPC stub method was called. @@ -10660,10 +11485,10 @@ async def test__update_settings_async(transport: str = 'grpc_asyncio', request_t # Establish that the response is the type that we expect. assert isinstance(response, logging_config.Settings) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.kms_service_account_id == 'kms_service_account_id_value' - assert response.storage_location == 'storage_location_value' + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_service_account_id == "kms_service_account_id_value" + assert response.storage_location == "storage_location_value" assert response.disable_default_sink is True @@ -10671,6 +11496,7 @@ async def test__update_settings_async(transport: str = 'grpc_asyncio', request_t async def test__update_settings_async_from_dict(): await test__update_settings_async(request_type=dict) + def test__update_settings_field_headers(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -10680,12 +11506,10 @@ def test__update_settings_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateSettingsRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_settings), - '__call__') as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: call.return_value = logging_config.Settings() client._update_settings(request) @@ -10697,9 +11521,9 @@ def test__update_settings_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -10712,13 +11536,13 @@ async def test__update_settings_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateSettingsRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_settings), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings()) + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings() + ) await client._update_settings(request) # Establish that the underlying gRPC stub method was called. @@ -10729,9 +11553,9 @@ async def test__update_settings_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test__update_settings_flattened(): @@ -10740,16 +11564,14 @@ def test__update_settings_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_settings), - '__call__') as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.Settings() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client._update_settings( - settings=logging_config.Settings(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + settings=logging_config.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -10757,10 +11579,10 @@ def test__update_settings_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].settings - mock_val = logging_config.Settings(name='name_value') + mock_val = logging_config.Settings(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @@ -10774,10 +11596,11 @@ def test__update_settings_flattened_error(): with pytest.raises(ValueError): client._update_settings( logging_config.UpdateSettingsRequest(), - settings=logging_config.Settings(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + settings=logging_config.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + @pytest.mark.asyncio async def test__update_settings_flattened_async(): client = BaseConfigServiceV2AsyncClient( @@ -10785,18 +11608,18 @@ async def test__update_settings_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_settings), - '__call__') as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.Settings() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client._update_settings( - settings=logging_config.Settings(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + settings=logging_config.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -10804,12 +11627,13 @@ async def test__update_settings_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].settings - mock_val = logging_config.Settings(name='name_value') + mock_val = logging_config.Settings(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val + @pytest.mark.asyncio async def test__update_settings_flattened_error_async(): client = BaseConfigServiceV2AsyncClient( @@ -10821,16 +11645,19 @@ async def test__update_settings_flattened_error_async(): with pytest.raises(ValueError): await client._update_settings( logging_config.UpdateSettingsRequest(), - settings=logging_config.Settings(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + settings=logging_config.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -@pytest.mark.parametrize("request_type", [ - 
logging_config.CopyLogEntriesRequest, - dict, -]) -def test__copy_log_entries(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_config.CopyLogEntriesRequest, + dict, + ], +) +def test__copy_log_entries(request_type, transport: str = "grpc"): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10841,11 +11668,9 @@ def test__copy_log_entries(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.copy_log_entries), - '__call__') as call: + with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client._copy_log_entries(request) # Establish that the underlying gRPC stub method was called. @@ -10863,32 +11688,33 @@ def test__copy_log_entries_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.CopyLogEntriesRequest( - name='name_value', - filter='filter_value', - destination='destination_value', + name="name_value", + filter="filter_value", + destination="destination_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.copy_log_entries), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._copy_log_entries(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CopyLogEntriesRequest( - name='name_value', - filter='filter_value', - destination='destination_value', + name="name_value", + filter="filter_value", + destination="destination_value", ) + def test__copy_log_entries_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -10907,8 +11733,12 @@ def test__copy_log_entries_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.copy_log_entries] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.copy_log_entries] = ( + mock_rpc + ) request = {} client._copy_log_entries(request) @@ -10926,8 +11756,11 @@ def test__copy_log_entries_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__copy_log_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test__copy_log_entries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -10941,12 +11774,17 @@ async def test__copy_log_entries_async_use_cached_wrapped_rpc(transport: str = " wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.copy_log_entries in client._client._transport._wrapped_methods + assert ( + client._client._transport.copy_log_entries + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.copy_log_entries] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.copy_log_entries + ] = mock_rpc request = {} await client._copy_log_entries(request) @@ -10965,8 +11803,11 @@ async def test__copy_log_entries_async_use_cached_wrapped_rpc(transport: str = " assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__copy_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging_config.CopyLogEntriesRequest): +async def test__copy_log_entries_async( + transport: str = "grpc_asyncio", request_type=logging_config.CopyLogEntriesRequest +): client = BaseConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), 
transport=transport, @@ -10977,12 +11818,10 @@ async def test__copy_log_entries_async(transport: str = 'grpc_asyncio', request_ request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.copy_log_entries), - '__call__') as call: + with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client._copy_log_entries(request) @@ -11039,8 +11878,7 @@ def test_credentials_transport_error(): options.api_key = "api_key" with pytest.raises(ValueError): client = BaseConfigServiceV2Client( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() + client_options=options, credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. @@ -11062,6 +11900,7 @@ def test_transport_instance(): client = BaseConfigServiceV2Client(transport=transport) assert client.transport is transport + def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.ConfigServiceV2GrpcTransport( @@ -11076,17 +11915,22 @@ def test_transport_get_channel(): channel = transport.grpc_channel assert channel -@pytest.mark.parametrize("transport_class", [ - transports.ConfigServiceV2GrpcTransport, - transports.ConfigServiceV2GrpcAsyncIOTransport, -]) + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, + ], +) def test_transport_adc(transport_class): # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() + def test_transport_kind_grpc(): transport = BaseConfigServiceV2Client.get_transport_class("grpc")( credentials=ga_credentials.AnonymousCredentials() @@ -11096,8 +11940,7 @@ def test_transport_kind_grpc(): def test_initialize_client_w_grpc(): client = BaseConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" ) assert client is not None @@ -11111,9 +11954,7 @@ def test_list_buckets_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: call.return_value = logging_config.ListBucketsResponse() client.list_buckets(request=None) @@ -11134,9 +11975,7 @@ def test_get_bucket_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: call.return_value = logging_config.LogBucket() client.get_bucket(request=None) @@ -11158,9 +11997,9 @@ def test_create_bucket_async_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.create_bucket_async), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + type(client.transport.create_bucket_async), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.create_bucket_async(request=None) # Establish that the underlying stub method was called. 
@@ -11181,9 +12020,9 @@ def test_update_bucket_async_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_bucket_async), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + type(client.transport.update_bucket_async), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.update_bucket_async(request=None) # Establish that the underlying stub method was called. @@ -11203,9 +12042,7 @@ def test_create_bucket_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: call.return_value = logging_config.LogBucket() client.create_bucket(request=None) @@ -11226,9 +12063,7 @@ def test_update_bucket_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: call.return_value = logging_config.LogBucket() client.update_bucket(request=None) @@ -11249,9 +12084,7 @@ def test_delete_bucket_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: call.return_value = None client.delete_bucket(request=None) @@ -11272,9 +12105,7 @@ def test_undelete_bucket_empty_call_grpc(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.undelete_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: call.return_value = None client.undelete_bucket(request=None) @@ -11295,9 +12126,7 @@ def test__list_views_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_views), - '__call__') as call: + with mock.patch.object(type(client.transport.list_views), "__call__") as call: call.return_value = logging_config.ListViewsResponse() client._list_views(request=None) @@ -11318,9 +12147,7 @@ def test__get_view_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_view), - '__call__') as call: + with mock.patch.object(type(client.transport.get_view), "__call__") as call: call.return_value = logging_config.LogView() client._get_view(request=None) @@ -11341,9 +12168,7 @@ def test__create_view_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_view), - '__call__') as call: + with mock.patch.object(type(client.transport.create_view), "__call__") as call: call.return_value = logging_config.LogView() client._create_view(request=None) @@ -11364,9 +12189,7 @@ def test__update_view_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_view), - '__call__') as call: + with mock.patch.object(type(client.transport.update_view), "__call__") as call: call.return_value = logging_config.LogView() client._update_view(request=None) @@ -11387,9 +12210,7 @@ def test__delete_view_empty_call_grpc(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_view), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: call.return_value = None client._delete_view(request=None) @@ -11410,9 +12231,7 @@ def test__list_sinks_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_sinks), - '__call__') as call: + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: call.return_value = logging_config.ListSinksResponse() client._list_sinks(request=None) @@ -11433,9 +12252,7 @@ def test__get_sink_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: call.return_value = logging_config.LogSink() client._get_sink(request=None) @@ -11456,9 +12273,7 @@ def test__create_sink_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: call.return_value = logging_config.LogSink() client._create_sink(request=None) @@ -11479,9 +12294,7 @@ def test__update_sink_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: call.return_value = logging_config.LogSink() client._update_sink(request=None) @@ -11502,9 +12315,7 @@ def test__delete_sink_empty_call_grpc(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: call.return_value = None client._delete_sink(request=None) @@ -11525,10 +12336,8 @@ def test__create_link_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_link), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.create_link), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client._create_link(request=None) # Establish that the underlying stub method was called. @@ -11548,10 +12357,8 @@ def test__delete_link_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_link), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client._delete_link(request=None) # Establish that the underlying stub method was called. @@ -11571,9 +12378,7 @@ def test__list_links_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_links), - '__call__') as call: + with mock.patch.object(type(client.transport.list_links), "__call__") as call: call.return_value = logging_config.ListLinksResponse() client._list_links(request=None) @@ -11594,9 +12399,7 @@ def test__get_link_empty_call_grpc(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.get_link), - '__call__') as call: + with mock.patch.object(type(client.transport.get_link), "__call__") as call: call.return_value = logging_config.Link() client._get_link(request=None) @@ -11617,9 +12420,7 @@ def test__list_exclusions_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: call.return_value = logging_config.ListExclusionsResponse() client._list_exclusions(request=None) @@ -11640,9 +12441,7 @@ def test__get_exclusion_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: call.return_value = logging_config.LogExclusion() client._get_exclusion(request=None) @@ -11663,9 +12462,7 @@ def test__create_exclusion_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: call.return_value = logging_config.LogExclusion() client._create_exclusion(request=None) @@ -11686,9 +12483,7 @@ def test__update_exclusion_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: call.return_value = logging_config.LogExclusion() client._update_exclusion(request=None) @@ -11709,9 +12504,7 @@ def test__delete_exclusion_empty_call_grpc(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: call.return_value = None client._delete_exclusion(request=None) @@ -11733,8 +12526,8 @@ def test__get_cmek_settings_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_cmek_settings), - '__call__') as call: + type(client.transport.get_cmek_settings), "__call__" + ) as call: call.return_value = logging_config.CmekSettings() client._get_cmek_settings(request=None) @@ -11756,8 +12549,8 @@ def test__update_cmek_settings_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_cmek_settings), - '__call__') as call: + type(client.transport.update_cmek_settings), "__call__" + ) as call: call.return_value = logging_config.CmekSettings() client._update_cmek_settings(request=None) @@ -11778,9 +12571,7 @@ def test__get_settings_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_settings), - '__call__') as call: + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: call.return_value = logging_config.Settings() client._get_settings(request=None) @@ -11801,9 +12592,7 @@ def test__update_settings_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_settings), - '__call__') as call: + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: call.return_value = logging_config.Settings() client._update_settings(request=None) @@ -11824,10 +12613,8 @@ def test__copy_log_entries_empty_call_grpc(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.copy_log_entries), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client._copy_log_entries(request=None) # Establish that the underlying stub method was called. @@ -11847,8 +12634,7 @@ def test_transport_kind_grpc_asyncio(): def test_initialize_client_w_grpc_asyncio(): client = BaseConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" + credentials=async_anonymous_credentials(), transport="grpc_asyncio" ) assert client is not None @@ -11863,13 +12649,13 @@ async def test_list_buckets_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: + with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListBucketsResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListBucketsResponse( + next_page_token="next_page_token_value", + ) + ) await client.list_buckets(request=None) # Establish that the underlying stub method was called. @@ -11890,19 +12676,19 @@ async def test_get_bucket_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( - name='name_value', - description='description_value', - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - analytics_enabled=True, - restricted_fields=['restricted_fields_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=["restricted_fields_value"], + ) + ) await client.get_bucket(request=None) # Establish that the underlying stub method was called. @@ -11924,11 +12710,11 @@ async def test_create_bucket_async_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.create_bucket_async), - '__call__') as call: + type(client.transport.create_bucket_async), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.create_bucket_async(request=None) @@ -11951,11 +12737,11 @@ async def test_update_bucket_async_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_bucket_async), - '__call__') as call: + type(client.transport.update_bucket_async), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.update_bucket_async(request=None) @@ -11977,19 +12763,19 @@ async def test_create_bucket_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.create_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( - name='name_value', - description='description_value', - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - analytics_enabled=True, - restricted_fields=['restricted_fields_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=["restricted_fields_value"], + ) + ) await client.create_bucket(request=None) # Establish that the underlying stub method was called. @@ -12010,19 +12796,19 @@ async def test_update_bucket_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( - name='name_value', - description='description_value', - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - analytics_enabled=True, - restricted_fields=['restricted_fields_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogBucket( + name="name_value", + description="description_value", + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=["restricted_fields_value"], + ) + ) await client.update_bucket(request=None) # Establish that the underlying stub method was called. @@ -12043,9 +12829,7 @@ async def test_delete_bucket_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_bucket(request=None) @@ -12068,9 +12852,7 @@ async def test_undelete_bucket_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.undelete_bucket), - '__call__') as call: + with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.undelete_bucket(request=None) @@ -12093,13 +12875,13 @@ async def test__list_views_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_views), - '__call__') as call: + with mock.patch.object(type(client.transport.list_views), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListViewsResponse( + next_page_token="next_page_token_value", + ) + ) await client._list_views(request=None) # Establish that the underlying stub method was called. @@ -12120,15 +12902,15 @@ async def test__get_view_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_view), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', - )) + with mock.patch.object(type(client.transport.get_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", + ) + ) await client._get_view(request=None) # Establish that the underlying stub method was called. @@ -12149,15 +12931,15 @@ async def test__create_view_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_view), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', - )) + with mock.patch.object(type(client.transport.create_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", + ) + ) await client._create_view(request=None) # Establish that the underlying stub method was called. @@ -12178,15 +12960,15 @@ async def test__update_view_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_view), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', - )) + with mock.patch.object(type(client.transport.update_view), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogView( + name="name_value", + description="description_value", + filter="filter_value", + ) + ) await client._update_view(request=None) # Establish that the underlying stub method was called. @@ -12207,9 +12989,7 @@ async def test__delete_view_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_view), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client._delete_view(request=None) @@ -12232,13 +13012,13 @@ async def test__list_sinks_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_sinks), - '__call__') as call: + with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListSinksResponse( + next_page_token="next_page_token_value", + ) + ) await client._list_sinks(request=None) # Establish that the underlying stub method was called. @@ -12259,20 +13039,20 @@ async def test__get_sink_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_sink), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', - include_children=True, - )) + with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) + ) await client._get_sink(request=None) # Establish that the underlying stub method was called. @@ -12293,20 +13073,20 @@ async def test__create_sink_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_sink), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', - include_children=True, - )) + with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) + ) await client._create_sink(request=None) # Establish that the underlying stub method was called. @@ -12327,20 +13107,20 @@ async def test__update_sink_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_sink), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', - include_children=True, - )) + with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogSink( + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity="writer_identity_value", + include_children=True, + ) + ) await client._update_sink(request=None) # Establish that the underlying stub method was called. @@ -12361,9 +13141,7 @@ async def test__delete_sink_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_sink), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client._delete_sink(request=None) @@ -12386,12 +13164,10 @@ async def test__create_link_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_link), - '__call__') as call: + with mock.patch.object(type(client.transport.create_link), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client._create_link(request=None) @@ -12413,12 +13189,10 @@ async def test__delete_link_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_link), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_link), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client._delete_link(request=None) @@ -12440,13 +13214,13 @@ async def test__list_links_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_links), - '__call__') as call: + with mock.patch.object(type(client.transport.list_links), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListLinksResponse( + next_page_token="next_page_token_value", + ) + ) await client._list_links(request=None) # Establish that the underlying stub method was called. @@ -12467,15 +13241,15 @@ async def test__get_link_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_link), - '__call__') as call: + with mock.patch.object(type(client.transport.get_link), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link( - name='name_value', - description='description_value', - lifecycle_state=logging_config.LifecycleState.ACTIVE, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Link( + name="name_value", + description="description_value", + lifecycle_state=logging_config.LifecycleState.ACTIVE, + ) + ) await client._get_link(request=None) # Establish that the underlying stub method was called. @@ -12496,13 +13270,13 @@ async def test__list_exclusions_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: + with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.ListExclusionsResponse( + next_page_token="next_page_token_value", + ) + ) await client._list_exclusions(request=None) # Establish that the underlying stub method was called. @@ -12523,16 +13297,16 @@ async def test__get_exclusion_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + ) await client._get_exclusion(request=None) # Establish that the underlying stub method was called. @@ -12553,16 +13327,16 @@ async def test__create_exclusion_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + ) await client._create_exclusion(request=None) # Establish that the underlying stub method was called. @@ -12583,16 +13357,16 @@ async def test__update_exclusion_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.LogExclusion( + name="name_value", + description="description_value", + filter="filter_value", + disabled=True, + ) + ) await client._update_exclusion(request=None) # Establish that the underlying stub method was called. @@ -12613,9 +13387,7 @@ async def test__delete_exclusion_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_exclusion), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client._delete_exclusion(request=None) @@ -12639,15 +13411,17 @@ async def test__get_cmek_settings_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_cmek_settings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_key_version_name='kms_key_version_name_value', - service_account_id='service_account_id_value', - )) + type(client.transport.get_cmek_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.CmekSettings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", + ) + ) await client._get_cmek_settings(request=None) # Establish that the underlying stub method was called. 
@@ -12669,15 +13443,17 @@ async def test__update_cmek_settings_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_cmek_settings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_key_version_name='kms_key_version_name_value', - service_account_id='service_account_id_value', - )) + type(client.transport.update_cmek_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.CmekSettings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", + ) + ) await client._update_cmek_settings(request=None) # Establish that the underlying stub method was called. @@ -12698,17 +13474,17 @@ async def test__get_settings_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_settings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_service_account_id='kms_service_account_id_value', - storage_location='storage_location_value', - disable_default_sink=True, - )) + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", + disable_default_sink=True, + ) + ) await client._get_settings(request=None) # Establish that the underlying stub method was called. @@ -12729,17 +13505,17 @@ async def test__update_settings_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_settings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_service_account_id='kms_service_account_id_value', - storage_location='storage_location_value', - disable_default_sink=True, - )) + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", + disable_default_sink=True, + ) + ) await client._update_settings(request=None) # Establish that the underlying stub method was called. @@ -12760,12 +13536,10 @@ async def test__copy_log_entries_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.copy_log_entries), - '__call__') as call: + with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client._copy_log_entries(request=None) @@ -12787,18 +13561,21 @@ def test_transport_grpc_default(): transports.ConfigServiceV2GrpcTransport, ) + def test_config_service_v2_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.ConfigServiceV2Transport( credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" + credentials_file="credentials.json", ) def test_config_service_v2_base_transport(): # Instantiate the base transport. - with mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport.__init__') as Transport: + with mock.patch( + "google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport.__init__" + ) as Transport: Transport.return_value = None transport = transports.ConfigServiceV2Transport( credentials=ga_credentials.AnonymousCredentials(), @@ -12807,41 +13584,41 @@ def test_config_service_v2_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. 
methods = ( - 'list_buckets', - 'get_bucket', - 'create_bucket_async', - 'update_bucket_async', - 'create_bucket', - 'update_bucket', - 'delete_bucket', - 'undelete_bucket', - 'list_views', - 'get_view', - 'create_view', - 'update_view', - 'delete_view', - 'list_sinks', - 'get_sink', - 'create_sink', - 'update_sink', - 'delete_sink', - 'create_link', - 'delete_link', - 'list_links', - 'get_link', - 'list_exclusions', - 'get_exclusion', - 'create_exclusion', - 'update_exclusion', - 'delete_exclusion', - 'get_cmek_settings', - 'update_cmek_settings', - 'get_settings', - 'update_settings', - 'copy_log_entries', - 'get_operation', - 'cancel_operation', - 'list_operations', + "list_buckets", + "get_bucket", + "create_bucket_async", + "update_bucket_async", + "create_bucket", + "update_bucket", + "delete_bucket", + "undelete_bucket", + "list_views", + "get_view", + "create_view", + "update_view", + "delete_view", + "list_sinks", + "get_sink", + "create_sink", + "update_sink", + "delete_sink", + "create_link", + "delete_link", + "list_links", + "get_link", + "list_exclusions", + "get_exclusion", + "create_exclusion", + "update_exclusion", + "delete_exclusion", + "get_cmek_settings", + "update_cmek_settings", + "get_settings", + "update_settings", + "copy_log_entries", + "get_operation", + "cancel_operation", + "list_operations", ) for method in methods: with pytest.raises(NotImplementedError): @@ -12857,7 +13634,7 @@ def test_config_service_v2_base_transport(): # Catch all for all remaining methods and properties remainder = [ - 'kind', + "kind", ] for r in remainder: with pytest.raises(NotImplementedError): @@ -12866,28 +13643,41 @@ def test_config_service_v2_base_transport(): def test_config_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, 
mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages') as Transport: + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch( + "google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages" + ) as Transport, + ): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.ConfigServiceV2Transport( credentials_file="credentials.json", quota_project_id="octopus", ) - load_creds.assert_called_once_with("credentials.json", + load_creds.assert_called_once_with( + "credentials.json", scopes=None, default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', -), + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), quota_project_id="octopus", ) def test_config_service_v2_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages') as Transport: + with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch( + "google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages" + ) as Transport, + ): Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.ConfigServiceV2Transport() @@ -12896,17 +13686,17 @@ def test_config_service_v2_base_transport_with_adc(): def test_config_service_v2_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) BaseConfigServiceV2Client() adc.assert_called_once_with( scopes=None, default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', -), + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), quota_project_id=None, ) @@ -12921,12 +13711,17 @@ def test_config_service_v2_auth_adc(): def test_config_service_v2_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read',), + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), quota_project_id="octopus", ) @@ -12939,39 +13734,39 @@ def test_config_service_v2_transport_auth_adc(transport_class): ], ) def test_config_service_v2_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) adc.return_value = (gdch_mock, None) transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) + gdch_mock.with_gdch_audience.assert_called_once_with(e) @pytest.mark.parametrize( "transport_class,grpc_helpers", [ (transports.ConfigServiceV2GrpcTransport, grpc_helpers), - 
(transports.ConfigServiceV2GrpcAsyncIOTransport, grpc_helpers_async) + (transports.ConfigServiceV2GrpcAsyncIOTransport, grpc_helpers_async), ], ) def test_config_service_v2_transport_create_channel(transport_class, grpc_helpers): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: + with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel, + ): creds = ga_credentials.AnonymousCredentials() adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) create_channel.assert_called_with( "logging.googleapis.com:443", @@ -12979,11 +13774,11 @@ def test_config_service_v2_transport_create_channel(transport_class, grpc_helper credentials_file=None, quota_project_id="octopus", default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', -), + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), scopes=["1", "2"], default_host="logging.googleapis.com", ssl_credentials=None, @@ -12994,10 +13789,14 @@ def test_config_service_v2_transport_create_channel(transport_class, grpc_helper ) -@pytest.mark.parametrize("transport_class", [transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport]) -def test_config_service_v2_grpc_transport_client_cert_source_for_mtls( - transport_class -): 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, + ], +) +def test_config_service_v2_grpc_transport_client_cert_source_for_mtls(transport_class): cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. @@ -13006,7 +13805,7 @@ def test_config_service_v2_grpc_transport_client_cert_source_for_mtls( transport_class( host="squid.clam.whelk", credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds + ssl_channel_credentials=mock_ssl_channel_creds, ) mock_create_channel.assert_called_once_with( "squid.clam.whelk:443", @@ -13027,45 +13826,52 @@ def test_config_service_v2_grpc_transport_client_cert_source_for_mtls( with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: transport_class( credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback + client_cert_source_for_mtls=client_cert_source_callback, ) expected_cert, expected_key = client_cert_source_callback() mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key + certificate_chain=expected_cert, private_key=expected_key ) -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) def test_config_service_v2_host_no_port(transport_name): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'logging.googleapis.com:443' + client_options=client_options.ClientOptions( + api_endpoint="logging.googleapis.com" + ), + transport=transport_name, ) + assert client.transport._host == ("logging.googleapis.com:443") + -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) +@pytest.mark.parametrize( + 
"transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) def test_config_service_v2_host_with_port(transport_name): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com:8000'), + client_options=client_options.ClientOptions( + api_endpoint="logging.googleapis.com:8000" + ), transport=transport_name, ) - assert client.transport._host == ( - 'logging.googleapis.com:8000' - ) + assert client.transport._host == ("logging.googleapis.com:8000") + def test_config_service_v2_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.ConfigServiceV2GrpcTransport( @@ -13078,7 +13884,7 @@ def test_config_service_v2_grpc_transport_channel(): def test_config_service_v2_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.ConfigServiceV2GrpcAsyncIOTransport( @@ -13093,12 +13899,22 @@ def test_config_service_v2_grpc_asyncio_transport_channel(): # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.filterwarnings("ignore::FutureWarning") -@pytest.mark.parametrize("transport_class", [transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport]) +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, + ], +) def test_config_service_v2_transport_channel_mtls_with_client_cert_source( - transport_class + transport_class, ): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -13107,7 +13923,7 @@ def test_config_service_v2_transport_channel_mtls_with_client_cert_source( cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -13137,17 +13953,23 @@ def test_config_service_v2_transport_channel_mtls_with_client_cert_source( # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport]) -def test_config_service_v2_transport_channel_mtls_with_adc( - transport_class -): +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, + ], +) +def test_config_service_v2_transport_channel_mtls_with_adc(transport_class): mock_ssl_cred = mock.Mock() with mock.patch.multiple( "google.auth.transport.grpc.SslCredentials", __init__=mock.Mock(return_value=None), ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel mock_cred = mock.Mock() @@ -13178,7 +14000,7 @@ def test_config_service_v2_transport_channel_mtls_with_adc( def test_config_service_v2_grpc_lro_client(): client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) transport = client.transport @@ -13195,7 +14017,7 @@ def test_config_service_v2_grpc_lro_client(): def test_config_service_v2_grpc_lro_async_client(): client = BaseConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', + transport="grpc_asyncio", ) transport = client.transport @@ -13211,7 +14033,9 @@ def test_config_service_v2_grpc_lro_async_client(): def test_cmek_settings_path(): project = "squid" - expected = "projects/{project}/cmekSettings".format(project=project, ) + expected = "projects/{project}/cmekSettings".format( + project=project, + ) actual = BaseConfigServiceV2Client.cmek_settings_path(project) assert expected == actual @@ -13226,12 +14050,20 @@ def test_parse_cmek_settings_path(): actual = 
BaseConfigServiceV2Client.parse_cmek_settings_path(path) assert expected == actual + def test_link_path(): project = "whelk" location = "octopus" bucket = "oyster" link = "nudibranch" - expected = "projects/{project}/locations/{location}/buckets/{bucket}/links/{link}".format(project=project, location=location, bucket=bucket, link=link, ) + expected = ( + "projects/{project}/locations/{location}/buckets/{bucket}/links/{link}".format( + project=project, + location=location, + bucket=bucket, + link=link, + ) + ) actual = BaseConfigServiceV2Client.link_path(project, location, bucket, link) assert expected == actual @@ -13249,11 +14081,16 @@ def test_parse_link_path(): actual = BaseConfigServiceV2Client.parse_link_path(path) assert expected == actual + def test_log_bucket_path(): project = "scallop" location = "abalone" bucket = "squid" - expected = "projects/{project}/locations/{location}/buckets/{bucket}".format(project=project, location=location, bucket=bucket, ) + expected = "projects/{project}/locations/{location}/buckets/{bucket}".format( + project=project, + location=location, + bucket=bucket, + ) actual = BaseConfigServiceV2Client.log_bucket_path(project, location, bucket) assert expected == actual @@ -13270,10 +14107,14 @@ def test_parse_log_bucket_path(): actual = BaseConfigServiceV2Client.parse_log_bucket_path(path) assert expected == actual + def test_log_exclusion_path(): project = "oyster" exclusion = "nudibranch" - expected = "projects/{project}/exclusions/{exclusion}".format(project=project, exclusion=exclusion, ) + expected = "projects/{project}/exclusions/{exclusion}".format( + project=project, + exclusion=exclusion, + ) actual = BaseConfigServiceV2Client.log_exclusion_path(project, exclusion) assert expected == actual @@ -13289,10 +14130,14 @@ def test_parse_log_exclusion_path(): actual = BaseConfigServiceV2Client.parse_log_exclusion_path(path) assert expected == actual + def test_log_sink_path(): project = "winkle" sink = "nautilus" - expected = 
"projects/{project}/sinks/{sink}".format(project=project, sink=sink, ) + expected = "projects/{project}/sinks/{sink}".format( + project=project, + sink=sink, + ) actual = BaseConfigServiceV2Client.log_sink_path(project, sink) assert expected == actual @@ -13308,12 +14153,20 @@ def test_parse_log_sink_path(): actual = BaseConfigServiceV2Client.parse_log_sink_path(path) assert expected == actual + def test_log_view_path(): project = "squid" location = "clam" bucket = "whelk" view = "octopus" - expected = "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format(project=project, location=location, bucket=bucket, view=view, ) + expected = ( + "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format( + project=project, + location=location, + bucket=bucket, + view=view, + ) + ) actual = BaseConfigServiceV2Client.log_view_path(project, location, bucket, view) assert expected == actual @@ -13331,9 +14184,12 @@ def test_parse_log_view_path(): actual = BaseConfigServiceV2Client.parse_log_view_path(path) assert expected == actual + def test_settings_path(): project = "winkle" - expected = "projects/{project}/settings".format(project=project, ) + expected = "projects/{project}/settings".format( + project=project, + ) actual = BaseConfigServiceV2Client.settings_path(project) assert expected == actual @@ -13348,9 +14204,12 @@ def test_parse_settings_path(): actual = BaseConfigServiceV2Client.parse_settings_path(path) assert expected == actual + def test_common_billing_account_path(): billing_account = "scallop" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) actual = BaseConfigServiceV2Client.common_billing_account_path(billing_account) assert expected == actual @@ -13365,9 +14224,12 @@ def test_parse_common_billing_account_path(): actual = 
BaseConfigServiceV2Client.parse_common_billing_account_path(path) assert expected == actual + def test_common_folder_path(): folder = "squid" - expected = "folders/{folder}".format(folder=folder, ) + expected = "folders/{folder}".format( + folder=folder, + ) actual = BaseConfigServiceV2Client.common_folder_path(folder) assert expected == actual @@ -13382,9 +14244,12 @@ def test_parse_common_folder_path(): actual = BaseConfigServiceV2Client.parse_common_folder_path(path) assert expected == actual + def test_common_organization_path(): organization = "whelk" - expected = "organizations/{organization}".format(organization=organization, ) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = BaseConfigServiceV2Client.common_organization_path(organization) assert expected == actual @@ -13399,9 +14264,12 @@ def test_parse_common_organization_path(): actual = BaseConfigServiceV2Client.parse_common_organization_path(path) assert expected == actual + def test_common_project_path(): project = "oyster" - expected = "projects/{project}".format(project=project, ) + expected = "projects/{project}".format( + project=project, + ) actual = BaseConfigServiceV2Client.common_project_path(project) assert expected == actual @@ -13416,10 +14284,14 @@ def test_parse_common_project_path(): actual = BaseConfigServiceV2Client.parse_common_project_path(path) assert expected == actual + def test_common_location_path(): project = "cuttlefish" location = "mussel" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) actual = BaseConfigServiceV2Client.common_location_path(project, location) assert expected == actual @@ -13439,14 +14311,18 @@ def test_parse_common_location_path(): def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() - with 
mock.patch.object(transports.ConfigServiceV2Transport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.ConfigServiceV2Transport, "_prep_wrapped_messages" + ) as prep: client = BaseConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) - with mock.patch.object(transports.ConfigServiceV2Transport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.ConfigServiceV2Transport, "_prep_wrapped_messages" + ) as prep: transport_class = BaseConfigServiceV2Client.get_transport_class() transport = transport_class( credentials=ga_credentials.AnonymousCredentials(), @@ -13457,7 +14333,8 @@ def test_client_with_default_client_info(): def test_cancel_operation(transport: str = "grpc"): client = BaseConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13477,10 +14354,12 @@ def test_cancel_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. assert response is None + @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = BaseConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13490,9 +14369,7 @@ async def test_cancel_operation_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -13515,7 +14392,7 @@ def test_cancel_operation_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None + call.return_value = None client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. @@ -13525,7 +14402,11 @@ def test_cancel_operation_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): @@ -13540,9 +14421,7 @@ async def test_cancel_operation_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -13551,7 +14430,10 @@ async def test_cancel_operation_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_cancel_operation_from_dict(): @@ -13570,6 +14452,7 @@ def test_cancel_operation_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = BaseConfigServiceV2AsyncClient( @@ -13578,9 +14461,7 @@ async def test_cancel_operation_from_dict_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.cancel_operation( request={ "name": "locations", @@ -13604,6 +14485,7 @@ def test_cancel_operation_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.CancelOperationRequest() + @pytest.mark.asyncio async def test_cancel_operation_flattened_async(): client = BaseConfigServiceV2AsyncClient( @@ -13612,9 +14494,7 @@ async def test_cancel_operation_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.cancel_operation() # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -13624,7 +14504,8 @@ async def test_cancel_operation_flattened_async(): def test_get_operation(transport: str = "grpc"): client = BaseConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13644,10 +14525,12 @@ def test_get_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.Operation) + @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = BaseConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13692,7 +14575,11 @@ def test_get_operation_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_get_operation_field_headers_async(): @@ -13718,7 +14605,10 @@ async def test_get_operation_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_get_operation_from_dict(): @@ -13737,6 +14627,7 @@ def test_get_operation_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = BaseConfigServiceV2AsyncClient( @@ -13771,6 +14662,7 @@ def test_get_operation_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.GetOperationRequest() + @pytest.mark.asyncio async def test_get_operation_flattened_async(): client = BaseConfigServiceV2AsyncClient( @@ -13791,7 +14683,8 @@ async def test_get_operation_flattened_async(): def test_list_operations(transport: str = "grpc"): client = BaseConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13811,10 +14704,12 @@ def test_list_operations(transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.ListOperationsResponse) + @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = BaseConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13859,7 +14754,11 @@ def test_list_operations_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_list_operations_field_headers_async(): @@ -13885,7 +14784,10 @@ async def test_list_operations_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_list_operations_from_dict(): @@ -13904,6 +14806,7 @@ def test_list_operations_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = BaseConfigServiceV2AsyncClient( @@ -13938,6 +14841,7 @@ def test_list_operations_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.ListOperationsRequest() + @pytest.mark.asyncio async def test_list_operations_flattened_async(): client = BaseConfigServiceV2AsyncClient( @@ -13958,10 +14862,11 @@ async def test_list_operations_flattened_async(): def test_transport_close_grpc(): client = BaseConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: with client: close.assert_not_called() close.assert_called_once() @@ -13970,10 +14875,11 @@ def test_transport_close_grpc(): @pytest.mark.asyncio async def test_transport_close_grpc_asyncio(): client = BaseConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" + credentials=async_anonymous_credentials(), transport="grpc_asyncio" ) - with mock.patch.object(type(getattr(client.transport, 
"_grpc_channel")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: async with client: close.assert_not_called() close.assert_called_once() @@ -13981,12 +14887,11 @@ async def test_transport_close_grpc_asyncio(): def test_client_ctx(): transports = [ - 'grpc', + "grpc", ] for transport in transports: client = BaseConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. with mock.patch.object(type(client.transport), "close") as close: @@ -13995,10 +14900,17 @@ def test_client_ctx(): pass close.assert_called() -@pytest.mark.parametrize("client_class,transport_class", [ - (BaseConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport), - (BaseConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport), -]) + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (BaseConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport), + ( + BaseConfigServiceV2AsyncClient, + transports.ConfigServiceV2GrpcAsyncIOTransport, + ), + ], +) def test_api_key_credentials(client_class, transport_class): with mock.patch.object( google.auth._default, "get_api_key_credentials", create=True @@ -14013,7 +14925,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_logging_service_v2.py 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_logging_service_v2.py index e03a5e8bb8d2..7052b092f8ac 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -14,6 +14,7 @@ # limitations under the License. # import os + # try/except added for compatibility with python < 3.8 try: from unittest import mock @@ -21,39 +22,24 @@ except ImportError: # pragma: NO COVER import mock -import grpc -from grpc.experimental import aio import json import math +from collections.abc import Mapping, Sequence + +import grpc import pytest -from collections.abc import Sequence, Mapping from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule +from grpc.experimental import aio from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule try: from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER +except ImportError: # pragma: NO COVER HAS_GOOGLE_AUTH_AIO = False -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2AsyncClient -from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client -from google.cloud.logging_v2.services.logging_service_v2 import pagers -from 
google.cloud.logging_v2.services.logging_service_v2 import transports -from google.cloud.logging_v2.types import log_entry -from google.cloud.logging_v2.types import logging -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account import google.api.monitored_resource_pb2 as monitored_resource_pb2 # type: ignore import google.auth import google.logging.type.http_request_pb2 as http_request_pb2 # type: ignore @@ -62,8 +48,26 @@ import google.protobuf.duration_pb2 as duration_pb2 # type: ignore import google.protobuf.struct_pb2 as struct_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore - - +from google.api_core import ( + client_options, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + path_template, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.logging_v2.services.logging_service_v2 import ( + LoggingServiceV2AsyncClient, + LoggingServiceV2Client, + pagers, + transports, +) +from google.cloud.logging_v2.types import log_entry, logging +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account CRED_INFO_JSON = { "credential_source": "/path/to/file", @@ -78,9 +82,11 @@ async def mock_async_gen(data, chunk_size=1): chunk = data[i : i + chunk_size] yield chunk.encode("utf-8") + def client_cert_source_callback(): return b"cert bytes", b"key bytes" + # TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. # See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. 
def async_anonymous_credentials(): @@ -88,17 +94,27 @@ def async_anonymous_credentials(): return ga_credentials_async.AnonymousCredentials() return ga_credentials.AnonymousCredentials() + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + # If default endpoint template is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint template so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) def test__get_default_mtls_endpoint(): @@ -110,21 +126,48 @@ def test__get_default_mtls_endpoint(): custom_endpoint = ".custom" assert LoggingServiceV2Client._get_default_mtls_endpoint(None) is None - assert LoggingServiceV2Client._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert LoggingServiceV2Client._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert LoggingServiceV2Client._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert LoggingServiceV2Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert LoggingServiceV2Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi - assert LoggingServiceV2Client._get_default_mtls_endpoint(custom_endpoint) == custom_endpoint + assert ( + 
LoggingServiceV2Client._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + LoggingServiceV2Client._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + LoggingServiceV2Client._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + LoggingServiceV2Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + LoggingServiceV2Client._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + assert ( + LoggingServiceV2Client._get_default_mtls_endpoint(custom_endpoint) + == custom_endpoint + ) + def test__read_environment_variables(): assert LoggingServiceV2Client._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert LoggingServiceV2Client._read_environment_variables() == (True, "auto", None) + assert LoggingServiceV2Client._read_environment_variables() == ( + True, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert LoggingServiceV2Client._read_environment_variables() == (False, "auto", None) + assert LoggingServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} @@ -138,27 +181,46 @@ def test__read_environment_variables(): ) else: assert LoggingServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert LoggingServiceV2Client._read_environment_variables() == ( False, - "auto", + "never", None, ) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert LoggingServiceV2Client._read_environment_variables() == (False, "never", None) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert 
LoggingServiceV2Client._read_environment_variables() == (False, "always", None) + assert LoggingServiceV2Client._read_environment_variables() == ( + False, + "always", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert LoggingServiceV2Client._read_environment_variables() == (False, "auto", None) + assert LoggingServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: LoggingServiceV2Client._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert LoggingServiceV2Client._read_environment_variables() == (False, "auto", "foo.com") + assert LoggingServiceV2Client._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) def test_use_client_cert_effective(): @@ -167,7 +229,9 @@ def test_use_client_cert_effective(): # the google-auth library supports automatic mTLS and determines that a # client certificate should be used. if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=True): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=True + ): assert LoggingServiceV2Client._use_client_cert_effective() is True # Test case 2: Test when `should_use_client_cert` returns False. @@ -175,7 +239,9 @@ def test_use_client_cert_effective(): # the google-auth library supports automatic mTLS and determines that a # client certificate should NOT be used. 
if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=False): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=False + ): assert LoggingServiceV2Client._use_client_cert_effective() is False # Test case 3: Test when `should_use_client_cert` is unavailable and the @@ -187,7 +253,9 @@ def test_use_client_cert_effective(): # Test case 4: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"} + ): assert LoggingServiceV2Client._use_client_cert_effective() is False # Test case 5: Test when `should_use_client_cert` is unavailable and the @@ -199,7 +267,9 @@ def test_use_client_cert_effective(): # Test case 6: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"} + ): assert LoggingServiceV2Client._use_client_cert_effective() is False # Test case 7: Test when `should_use_client_cert` is unavailable and the @@ -211,7 +281,9 @@ def test_use_client_cert_effective(): # Test case 8: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". 
if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"} + ): assert LoggingServiceV2Client._use_client_cert_effective() is False # Test case 9: Test when `should_use_client_cert` is unavailable and the @@ -226,83 +298,167 @@ def test_use_client_cert_effective(): # The method should raise a ValueError as the environment variable must be either # "true" or "false". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): with pytest.raises(ValueError): LoggingServiceV2Client._use_client_cert_effective() # Test case 11: Test when `should_use_client_cert` is available and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. # The method should return False as the environment variable is set to an invalid value. - if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): assert LoggingServiceV2Client._use_client_cert_effective() is False # Test case 12: Test when `should_use_client_cert` is available and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also, # the GOOGLE_API_CONFIG environment variable is unset. 
- if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): assert LoggingServiceV2Client._use_client_cert_effective() is False + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() assert LoggingServiceV2Client._get_client_cert_source(None, False) is None - assert LoggingServiceV2Client._get_client_cert_source(mock_provided_cert_source, False) is None - assert LoggingServiceV2Client._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + assert ( + LoggingServiceV2Client._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + LoggingServiceV2Client._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + LoggingServiceV2Client._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + LoggingServiceV2Client._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert LoggingServiceV2Client._get_client_cert_source(None, True) is mock_default_cert_source - assert LoggingServiceV2Client._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source -@mock.patch.object(LoggingServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", 
modify_default_endpoint_template(LoggingServiceV2Client)) -@mock.patch.object(LoggingServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LoggingServiceV2AsyncClient)) +@mock.patch.object( + LoggingServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2Client), +) +@mock.patch.object( + LoggingServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2AsyncClient), +) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() default_universe = LoggingServiceV2Client._DEFAULT_UNIVERSE - default_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) - assert LoggingServiceV2Client._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert LoggingServiceV2Client._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT - assert LoggingServiceV2Client._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert LoggingServiceV2Client._get_api_endpoint(None, None, default_universe, "always") == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT - assert LoggingServiceV2Client._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT - assert LoggingServiceV2Client._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert LoggingServiceV2Client._get_api_endpoint(None, None, default_universe, 
"never") == default_endpoint + assert ( + LoggingServiceV2Client._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + LoggingServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + LoggingServiceV2Client._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + LoggingServiceV2Client._get_api_endpoint(None, None, default_universe, "always") + == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + LoggingServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + LoggingServiceV2Client._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + LoggingServiceV2Client._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) with pytest.raises(MutualTLSChannelError) as excinfo: - LoggingServiceV2Client._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + LoggingServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) def test__get_universe_domain(): client_universe_domain = "foo.com" universe_domain_env = "bar.com" - assert LoggingServiceV2Client._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert LoggingServiceV2Client._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert LoggingServiceV2Client._get_universe_domain(None, None) == LoggingServiceV2Client._DEFAULT_UNIVERSE + assert ( + LoggingServiceV2Client._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + LoggingServiceV2Client._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + LoggingServiceV2Client._get_universe_domain(None, None) + == LoggingServiceV2Client._DEFAULT_UNIVERSE + ) with pytest.raises(ValueError) as excinfo: LoggingServiceV2Client._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): cred = mock.Mock(["get_cred_info"]) cred.get_cred_info = mock.Mock(return_value=cred_info_json) @@ -318,7 +474,8 @@ def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_in else: assert error.details == ["foo"] -@pytest.mark.parametrize("error_code", [401,403,404,500]) + +@pytest.mark.parametrize("error_code", [401, 
403, 404, 500]) def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): cred = mock.Mock([]) assert not hasattr(cred, "get_cred_info") @@ -331,59 +488,83 @@ def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): client._add_cred_info_for_auth_errors(error) assert error.details == [] -@pytest.mark.parametrize("client_class,transport_name", [ - (LoggingServiceV2Client, "grpc"), - (LoggingServiceV2AsyncClient, "grpc_asyncio"), -]) -def test_logging_service_v2_client_from_service_account_info(client_class, transport_name): + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (LoggingServiceV2Client, "grpc"), + (LoggingServiceV2AsyncClient, "grpc_asyncio"), + ], +) +def test_logging_service_v2_client_from_service_account_info( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: factory.return_value = creds info = {"valid": True} client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ( - 'logging.googleapis.com:443' - ) + assert client.transport._host == ("logging.googleapis.com:443") -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.LoggingServiceV2GrpcTransport, "grpc"), - (transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_logging_service_v2_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.LoggingServiceV2GrpcTransport, "grpc"), + (transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), + 
], +) +def test_logging_service_v2_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=False) use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class,transport_name", [ - (LoggingServiceV2Client, "grpc"), - (LoggingServiceV2AsyncClient, "grpc_asyncio"), -]) -def test_logging_service_v2_client_from_service_account_file(client_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (LoggingServiceV2Client, "grpc"), + (LoggingServiceV2AsyncClient, "grpc_asyncio"), + ], +) +def test_logging_service_v2_client_from_service_account_file( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_json( + 
"dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ( - 'logging.googleapis.com:443' - ) + assert client.transport._host == ("logging.googleapis.com:443") def test_logging_service_v2_client_get_transport_class(): @@ -397,29 +578,44 @@ def test_logging_service_v2_client_get_transport_class(): assert transport == transports.LoggingServiceV2GrpcTransport -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), - (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(LoggingServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LoggingServiceV2Client)) -@mock.patch.object(LoggingServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LoggingServiceV2AsyncClient)) -def test_logging_service_v2_client_client_options(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), + ( + LoggingServiceV2AsyncClient, + transports.LoggingServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + LoggingServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2Client), +) +@mock.patch.object( + LoggingServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2AsyncClient), +) +def test_logging_service_v2_client_client_options( + client_class, transport_class, transport_name +): # Check that if channel is provided we won't create a new one. 
- with mock.patch.object(LoggingServiceV2Client, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) + with mock.patch.object(LoggingServiceV2Client, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. - with mock.patch.object(LoggingServiceV2Client, 'get_transport_class') as gtc: + with mock.patch.object(LoggingServiceV2Client, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() # Check the case api_endpoint is provided. options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( @@ -437,13 +633,15 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -455,7 +653,7 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( @@ -475,17 +673,22 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, 
transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -494,46 +697,90 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, api_audience=None, ) # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, - api_audience="https://language.googleapis.com" + api_audience="https://language.googleapis.com", ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc", "true"), - (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "true"), - (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc", "false"), - (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(LoggingServiceV2Client, 
"_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LoggingServiceV2Client)) -@mock.patch.object(LoggingServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LoggingServiceV2AsyncClient)) + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + LoggingServiceV2Client, + transports.LoggingServiceV2GrpcTransport, + "grpc", + "true", + ), + ( + LoggingServiceV2AsyncClient, + transports.LoggingServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + LoggingServiceV2Client, + transports.LoggingServiceV2GrpcTransport, + "grpc", + "false", + ), + ( + LoggingServiceV2AsyncClient, + transports.LoggingServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + LoggingServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2Client), +) +@mock.patch.object( + LoggingServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2AsyncClient), +) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_logging_service_v2_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): +def test_logging_service_v2_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. # Check the case client_cert_source is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -552,12 +799,22 @@ def test_logging_service_v2_client_mtls_env_auto(client_class, transport_class, # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -578,15 +835,22 @@ def test_logging_service_v2_client_mtls_env_auto(client_class, transport_class, ) # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -596,19 +860,31 @@ def test_logging_service_v2_client_mtls_env_auto(client_class, transport_class, ) -@pytest.mark.parametrize("client_class", [ - LoggingServiceV2Client, LoggingServiceV2AsyncClient -]) -@mock.patch.object(LoggingServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2Client)) -@mock.patch.object(LoggingServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2AsyncClient)) +@pytest.mark.parametrize( + "client_class", [LoggingServiceV2Client, LoggingServiceV2AsyncClient] +) +@mock.patch.object( + LoggingServiceV2Client, + "DEFAULT_ENDPOINT", + modify_default_endpoint(LoggingServiceV2Client), +) +@mock.patch.object( + LoggingServiceV2AsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(LoggingServiceV2AsyncClient), +) def test_logging_service_v2_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source == mock_client_cert_source @@ -616,18 +892,25 @@ def test_logging_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source is None # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): if hasattr(google.auth.transport.mtls, "should_use_client_cert"): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, ) api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( options @@ -664,23 +947,23 @@ def test_logging_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas env = os.environ.copy() env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) with mock.patch.dict(os.environ, env, clear=True): - config_filename = "mock_certificate_config.json" - config_file_content = json.dumps(config_data) - m = mock.mock_open(read_data=config_file_content) - with mock.patch("builtins.open", m): - with mock.patch.dict( - os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} - ): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, - api_endpoint=mock_api_endpoint, - ) - api_endpoint, cert_source = ( - client_class.get_mtls_endpoint_and_cert_source(options) - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is expected_cert_source + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert 
api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). test_cases = [ @@ -711,23 +994,23 @@ def test_logging_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas env = os.environ.copy() env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") with mock.patch.dict(os.environ, env, clear=True): - config_filename = "mock_certificate_config.json" - config_file_content = json.dumps(config_data) - m = mock.mock_open(read_data=config_file_content) - with mock.patch("builtins.open", m): - with mock.patch.dict( - os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} - ): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, - api_endpoint=mock_api_endpoint, - ) - api_endpoint, cert_source = ( - client_class.get_mtls_endpoint_and_cert_source(options) - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is expected_cert_source + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): @@ -743,16 +1026,27 @@ def test_logging_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_ENDPOINT assert cert_source is None # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source() + ) assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source @@ -762,27 +1056,50 @@ def test_logging_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas with pytest.raises(MutualTLSChannelError) as excinfo: client_class.get_mtls_endpoint_and_cert_source() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) -@pytest.mark.parametrize("client_class", [ - LoggingServiceV2Client, LoggingServiceV2AsyncClient -]) -@mock.patch.object(LoggingServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", 
modify_default_endpoint_template(LoggingServiceV2Client)) -@mock.patch.object(LoggingServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LoggingServiceV2AsyncClient)) + +@pytest.mark.parametrize( + "client_class", [LoggingServiceV2Client, LoggingServiceV2AsyncClient] +) +@mock.patch.object( + LoggingServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2Client), +) +@mock.patch.object( + LoggingServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(LoggingServiceV2AsyncClient), +) def test_logging_service_v2_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" default_universe = LoggingServiceV2Client._DEFAULT_UNIVERSE - default_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", @@ -805,11 +1122,19 @@ def test_logging_service_v2_client_client_api_endpoint(client_class): universe_exists = hasattr(options, "universe_domain") if universe_exists: options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the 
_DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. @@ -817,26 +1142,39 @@ def test_logging_service_v2_client_client_api_endpoint(client_class): if hasattr(options, "universe_domain"): delattr(options, "universe_domain") with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) assert client.api_endpoint == default_endpoint -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), - (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_logging_service_v2_client_client_options_scopes(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), + ( + LoggingServiceV2AsyncClient, + transports.LoggingServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_logging_service_v2_client_client_options_scopes( + client_class, transport_class, transport_name +): # Check the case scopes are provided. 
options = client_options.ClientOptions( scopes=["1", "2"], ) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -845,23 +1183,39 @@ def test_logging_service_v2_client_client_options_scopes(client_class, transport api_audience=None, ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc", grpc_helpers), - (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_logging_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + LoggingServiceV2Client, + transports.LoggingServiceV2GrpcTransport, + "grpc", + grpc_helpers, + ), + ( + LoggingServiceV2AsyncClient, + transports.LoggingServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_logging_service_v2_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -870,11 +1224,14 @@ def test_logging_service_v2_client_client_options_credentials_file(client_class, api_audience=None, ) + def test_logging_service_v2_client_client_options_from_dict(): - with mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2GrpcTransport.__init__') as grpc_transport: + with mock.patch( + "google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2GrpcTransport.__init__" + ) as grpc_transport: grpc_transport.return_value = None client = LoggingServiceV2Client( - client_options={'api_endpoint': 'squid.clam.whelk'} + client_options={"api_endpoint": "squid.clam.whelk"} ) grpc_transport.assert_called_once_with( credentials=None, @@ -889,23 +1246,38 @@ def test_logging_service_v2_client_client_options_from_dict(): ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc", grpc_helpers), - (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_logging_service_v2_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): 
+@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + LoggingServiceV2Client, + transports.LoggingServiceV2GrpcTransport, + "grpc", + grpc_helpers, + ), + ( + LoggingServiceV2AsyncClient, + transports.LoggingServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_logging_service_v2_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -915,13 +1287,13 @@ def test_logging_service_v2_client_create_channel_credentials_file(client_class, ) # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch.object(grpc_helpers, "create_channel") as create_channel, + ): creds = ga_credentials.AnonymousCredentials() file_creds = ga_credentials.AnonymousCredentials() load_creds.return_value = (file_creds, None) @@ -933,12 +1305,12 @@ def test_logging_service_v2_client_create_channel_credentials_file(client_class, credentials_file=None, quota_project_id=None, default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', -), + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), scopes=None, default_host="logging.googleapis.com", ssl_credentials=None, @@ -949,11 +1321,14 @@ def test_logging_service_v2_client_create_channel_credentials_file(client_class, ) -@pytest.mark.parametrize("request_type", [ - logging.DeleteLogRequest, - dict, -]) -def test_delete_log(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging.DeleteLogRequest, + dict, + ], +) +def test_delete_log(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -964,9 +1339,7 @@ def test_delete_log(request_type, 
transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_log), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None response = client.delete_log(request) @@ -986,28 +1359,29 @@ def test_delete_log_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging.DeleteLogRequest( - log_name='log_name_value', + log_name="log_name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.delete_log), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.delete_log(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging.DeleteLogRequest( - log_name='log_name_value', + log_name="log_name_value", ) + def test_delete_log_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1026,7 +1400,9 @@ def test_delete_log_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.delete_log] = mock_rpc request = {} client.delete_log(request) @@ -1040,6 +1416,7 @@ def test_delete_log_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_delete_log_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1055,12 +1432,17 @@ async def test_delete_log_async_use_cached_wrapped_rpc(transport: str = "grpc_as wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.delete_log in client._client._transport._wrapped_methods + assert ( + client._client._transport.delete_log + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_log] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.delete_log + ] = mock_rpc request = {} await client.delete_log(request) @@ -1074,8 +1456,11 @@ async def test_delete_log_async_use_cached_wrapped_rpc(transport: str = "grpc_as assert wrapper_fn.call_count == 0 
assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_log_async(transport: str = 'grpc_asyncio', request_type=logging.DeleteLogRequest): +async def test_delete_log_async( + transport: str = "grpc_asyncio", request_type=logging.DeleteLogRequest +): client = LoggingServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -1086,9 +1471,7 @@ async def test_delete_log_async(transport: str = 'grpc_asyncio', request_type=lo request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_log), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.delete_log(request) @@ -1107,6 +1490,7 @@ async def test_delete_log_async(transport: str = 'grpc_asyncio', request_type=lo async def test_delete_log_async_from_dict(): await test_delete_log_async(request_type=dict) + def test_delete_log_field_headers(): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1116,12 +1500,10 @@ def test_delete_log_field_headers(): # a field header. Set these to a non-empty value. request = logging.DeleteLogRequest() - request.log_name = 'log_name_value' + request.log_name = "log_name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_log), "__call__") as call: call.return_value = None client.delete_log(request) @@ -1133,9 +1515,9 @@ def test_delete_log_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'log_name=log_name_value', - ) in kw['metadata'] + "x-goog-request-params", + "log_name=log_name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1148,12 +1530,10 @@ async def test_delete_log_field_headers_async(): # a field header. Set these to a non-empty value. request = logging.DeleteLogRequest() - request.log_name = 'log_name_value' + request.log_name = "log_name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_log), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_log(request) @@ -1165,9 +1545,9 @@ async def test_delete_log_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'log_name=log_name_value', - ) in kw['metadata'] + "x-goog-request-params", + "log_name=log_name_value", + ) in kw["metadata"] def test_delete_log_flattened(): @@ -1176,15 +1556,13 @@ def test_delete_log_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_log), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.delete_log( - log_name='log_name_value', + log_name="log_name_value", ) # Establish that the underlying call was made with the expected @@ -1192,7 +1570,7 @@ def test_delete_log_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].log_name - mock_val = 'log_name_value' + mock_val = "log_name_value" assert arg == mock_val @@ -1206,9 +1584,10 @@ def test_delete_log_flattened_error(): with pytest.raises(ValueError): client.delete_log( logging.DeleteLogRequest(), - log_name='log_name_value', + log_name="log_name_value", ) + @pytest.mark.asyncio async def test_delete_log_flattened_async(): client = LoggingServiceV2AsyncClient( @@ -1216,9 +1595,7 @@ async def test_delete_log_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_log), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -1226,7 +1603,7 @@ async def test_delete_log_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.delete_log( - log_name='log_name_value', + log_name="log_name_value", ) # Establish that the underlying call was made with the expected @@ -1234,9 +1611,10 @@ async def test_delete_log_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].log_name - mock_val = 'log_name_value' + mock_val = "log_name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_delete_log_flattened_error_async(): client = LoggingServiceV2AsyncClient( @@ -1248,15 +1626,18 @@ async def test_delete_log_flattened_error_async(): with pytest.raises(ValueError): await client.delete_log( logging.DeleteLogRequest(), - log_name='log_name_value', + log_name="log_name_value", ) -@pytest.mark.parametrize("request_type", [ - logging.WriteLogEntriesRequest, - dict, -]) -def test_write_log_entries(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging.WriteLogEntriesRequest, + dict, + ], +) +def test_write_log_entries(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1268,11 +1649,10 @@ def test_write_log_entries(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.write_log_entries), - '__call__') as call: + type(client.transport.write_log_entries), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = logging.WriteLogEntriesResponse( - ) + call.return_value = logging.WriteLogEntriesResponse() response = client.write_log_entries(request) # Establish that the underlying gRPC stub method was called. @@ -1290,28 +1670,31 @@ def test_write_log_entries_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. 
client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging.WriteLogEntriesRequest( - log_name='log_name_value', + log_name="log_name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.write_log_entries), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.write_log_entries), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.write_log_entries(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging.WriteLogEntriesRequest( - log_name='log_name_value', + log_name="log_name_value", ) + def test_write_log_entries_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1330,8 +1713,12 @@ def test_write_log_entries_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.write_log_entries] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.write_log_entries] = ( + mock_rpc + ) request = {} client.write_log_entries(request) @@ -1344,8 +1731,11 @@ def test_write_log_entries_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_write_log_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_write_log_entries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1359,12 +1749,17 @@ async def test_write_log_entries_async_use_cached_wrapped_rpc(transport: str = " wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.write_log_entries in client._client._transport._wrapped_methods + assert ( + client._client._transport.write_log_entries + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.write_log_entries] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.write_log_entries + ] = mock_rpc request = {} await client.write_log_entries(request) @@ -1378,8 +1773,11 @@ async def test_write_log_entries_async_use_cached_wrapped_rpc(transport: str = " assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_write_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.WriteLogEntriesRequest): +async def test_write_log_entries_async( + transport: str = "grpc_asyncio", request_type=logging.WriteLogEntriesRequest +): client = LoggingServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -1391,11 
+1789,12 @@ async def test_write_log_entries_async(transport: str = 'grpc_asyncio', request_ # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.write_log_entries), - '__call__') as call: + type(client.transport.write_log_entries), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging.WriteLogEntriesResponse( - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.WriteLogEntriesResponse() + ) response = await client.write_log_entries(request) # Establish that the underlying gRPC stub method was called. @@ -1420,17 +1819,17 @@ def test_write_log_entries_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.write_log_entries), - '__call__') as call: + type(client.transport.write_log_entries), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = logging.WriteLogEntriesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.write_log_entries( - log_name='log_name_value', - resource=monitored_resource_pb2.MonitoredResource(type='type_value'), - labels={'key_value': 'value_value'}, - entries=[log_entry.LogEntry(log_name='log_name_value')], + log_name="log_name_value", + resource=monitored_resource_pb2.MonitoredResource(type="type_value"), + labels={"key_value": "value_value"}, + entries=[log_entry.LogEntry(log_name="log_name_value")], ) # Establish that the underlying call was made with the expected @@ -1438,16 +1837,16 @@ def test_write_log_entries_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].log_name - mock_val = 'log_name_value' + mock_val = "log_name_value" assert arg == mock_val arg = args[0].resource - mock_val = monitored_resource_pb2.MonitoredResource(type='type_value') + mock_val = monitored_resource_pb2.MonitoredResource(type="type_value") assert arg == mock_val arg = args[0].labels - mock_val = {'key_value': 'value_value'} + mock_val = {"key_value": "value_value"} assert arg == mock_val arg = args[0].entries - mock_val = [log_entry.LogEntry(log_name='log_name_value')] + mock_val = [log_entry.LogEntry(log_name="log_name_value")] assert arg == mock_val @@ -1461,12 +1860,13 @@ def test_write_log_entries_flattened_error(): with pytest.raises(ValueError): client.write_log_entries( logging.WriteLogEntriesRequest(), - log_name='log_name_value', - resource=monitored_resource_pb2.MonitoredResource(type='type_value'), - labels={'key_value': 'value_value'}, - entries=[log_entry.LogEntry(log_name='log_name_value')], + log_name="log_name_value", + resource=monitored_resource_pb2.MonitoredResource(type="type_value"), + labels={"key_value": "value_value"}, + entries=[log_entry.LogEntry(log_name="log_name_value")], ) + @pytest.mark.asyncio async def test_write_log_entries_flattened_async(): client = LoggingServiceV2AsyncClient( @@ -1475,19 +1875,21 @@ async def test_write_log_entries_flattened_async(): # Mock the actual call within the 
gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.write_log_entries), - '__call__') as call: + type(client.transport.write_log_entries), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = logging.WriteLogEntriesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.WriteLogEntriesResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.WriteLogEntriesResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.write_log_entries( - log_name='log_name_value', - resource=monitored_resource_pb2.MonitoredResource(type='type_value'), - labels={'key_value': 'value_value'}, - entries=[log_entry.LogEntry(log_name='log_name_value')], + log_name="log_name_value", + resource=monitored_resource_pb2.MonitoredResource(type="type_value"), + labels={"key_value": "value_value"}, + entries=[log_entry.LogEntry(log_name="log_name_value")], ) # Establish that the underlying call was made with the expected @@ -1495,18 +1897,19 @@ async def test_write_log_entries_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].log_name - mock_val = 'log_name_value' + mock_val = "log_name_value" assert arg == mock_val arg = args[0].resource - mock_val = monitored_resource_pb2.MonitoredResource(type='type_value') + mock_val = monitored_resource_pb2.MonitoredResource(type="type_value") assert arg == mock_val arg = args[0].labels - mock_val = {'key_value': 'value_value'} + mock_val = {"key_value": "value_value"} assert arg == mock_val arg = args[0].entries - mock_val = [log_entry.LogEntry(log_name='log_name_value')] + mock_val = [log_entry.LogEntry(log_name="log_name_value")] assert arg == mock_val + @pytest.mark.asyncio async def test_write_log_entries_flattened_error_async(): client = LoggingServiceV2AsyncClient( @@ -1518,18 +1921,21 @@ async def 
test_write_log_entries_flattened_error_async(): with pytest.raises(ValueError): await client.write_log_entries( logging.WriteLogEntriesRequest(), - log_name='log_name_value', - resource=monitored_resource_pb2.MonitoredResource(type='type_value'), - labels={'key_value': 'value_value'}, - entries=[log_entry.LogEntry(log_name='log_name_value')], + log_name="log_name_value", + resource=monitored_resource_pb2.MonitoredResource(type="type_value"), + labels={"key_value": "value_value"}, + entries=[log_entry.LogEntry(log_name="log_name_value")], ) -@pytest.mark.parametrize("request_type", [ - logging.ListLogEntriesRequest, - dict, -]) -def test_list_log_entries(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging.ListLogEntriesRequest, + dict, + ], +) +def test_list_log_entries(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1540,12 +1946,10 @@ def test_list_log_entries(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging.ListLogEntriesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.list_log_entries(request) @@ -1557,7 +1961,7 @@ def test_list_log_entries(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListLogEntriesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_list_log_entries_non_empty_request_with_auto_populated_field(): @@ -1565,32 +1969,33 @@ def test_list_log_entries_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging.ListLogEntriesRequest( - filter='filter_value', - order_by='order_by_value', - page_token='page_token_value', + filter="filter_value", + order_by="order_by_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_log_entries(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging.ListLogEntriesRequest( - filter='filter_value', - order_by='order_by_value', - page_token='page_token_value', + filter="filter_value", + order_by="order_by_value", + page_token="page_token_value", ) + def test_list_log_entries_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1609,8 +2014,12 @@ def test_list_log_entries_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_log_entries] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_log_entries] = ( + mock_rpc + ) request = {} client.list_log_entries(request) @@ -1623,8 +2032,11 @@ def test_list_log_entries_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_log_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_log_entries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1638,12 +2050,17 @@ async def test_list_log_entries_async_use_cached_wrapped_rpc(transport: str = "g wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_log_entries in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_log_entries + in 
client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_log_entries] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_log_entries + ] = mock_rpc request = {} await client.list_log_entries(request) @@ -1657,8 +2074,11 @@ async def test_list_log_entries_async_use_cached_wrapped_rpc(transport: str = "g assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.ListLogEntriesRequest): +async def test_list_log_entries_async( + transport: str = "grpc_asyncio", request_type=logging.ListLogEntriesRequest +): client = LoggingServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -1669,13 +2089,13 @@ async def test_list_log_entries_async(transport: str = 'grpc_asyncio', request_t request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogEntriesResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListLogEntriesResponse( + next_page_token="next_page_token_value", + ) + ) response = await client.list_log_entries(request) # Establish that the underlying gRPC stub method was called. @@ -1686,7 +2106,7 @@ async def test_list_log_entries_async(transport: str = 'grpc_asyncio', request_t # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListLogEntriesAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio @@ -1700,17 +2120,15 @@ def test_list_log_entries_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging.ListLogEntriesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_log_entries( - resource_names=['resource_names_value'], - filter='filter_value', - order_by='order_by_value', + resource_names=["resource_names_value"], + filter="filter_value", + order_by="order_by_value", ) # Establish that the underlying call was made with the expected @@ -1718,13 +2136,13 @@ def test_list_log_entries_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].resource_names - mock_val = ['resource_names_value'] + mock_val = ["resource_names_value"] assert arg == mock_val arg = args[0].filter - mock_val = 'filter_value' + mock_val = "filter_value" assert arg == mock_val arg = args[0].order_by - mock_val = 'order_by_value' + mock_val = "order_by_value" assert arg == mock_val @@ -1738,11 +2156,12 @@ def test_list_log_entries_flattened_error(): with pytest.raises(ValueError): client.list_log_entries( logging.ListLogEntriesRequest(), - resource_names=['resource_names_value'], - filter='filter_value', - order_by='order_by_value', + resource_names=["resource_names_value"], + filter="filter_value", + order_by="order_by_value", ) + @pytest.mark.asyncio async def test_list_log_entries_flattened_async(): client = LoggingServiceV2AsyncClient( @@ -1750,19 +2169,19 @@ async def 
test_list_log_entries_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging.ListLogEntriesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogEntriesResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListLogEntriesResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_log_entries( - resource_names=['resource_names_value'], - filter='filter_value', - order_by='order_by_value', + resource_names=["resource_names_value"], + filter="filter_value", + order_by="order_by_value", ) # Establish that the underlying call was made with the expected @@ -1770,15 +2189,16 @@ async def test_list_log_entries_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].resource_names - mock_val = ['resource_names_value'] + mock_val = ["resource_names_value"] assert arg == mock_val arg = args[0].filter - mock_val = 'filter_value' + mock_val = "filter_value" assert arg == mock_val arg = args[0].order_by - mock_val = 'order_by_value' + mock_val = "order_by_value" assert arg == mock_val + @pytest.mark.asyncio async def test_list_log_entries_flattened_error_async(): client = LoggingServiceV2AsyncClient( @@ -1790,9 +2210,9 @@ async def test_list_log_entries_flattened_error_async(): with pytest.raises(ValueError): await client.list_log_entries( logging.ListLogEntriesRequest(), - resource_names=['resource_names_value'], - filter='filter_value', - order_by='order_by_value', + resource_names=["resource_names_value"], + filter="filter_value", + order_by="order_by_value", ) @@ -1803,9 +2223,7 @@ def 
test_list_log_entries_pager(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( logging.ListLogEntriesResponse( @@ -1814,17 +2232,17 @@ def test_list_log_entries_pager(transport_name: str = "grpc"): log_entry.LogEntry(), log_entry.LogEntry(), ], - next_page_token='abc', + next_page_token="abc", ), logging.ListLogEntriesResponse( entries=[], - next_page_token='def', + next_page_token="def", ), logging.ListLogEntriesResponse( entries=[ log_entry.LogEntry(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging.ListLogEntriesResponse( entries=[ @@ -1846,8 +2264,9 @@ def test_list_log_entries_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, log_entry.LogEntry) - for i in results) + assert all(isinstance(i, log_entry.LogEntry) for i in results) + + def test_list_log_entries_pages(transport_name: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1855,9 +2274,7 @@ def test_list_log_entries_pages(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( logging.ListLogEntriesResponse( @@ -1866,17 +2283,17 @@ def test_list_log_entries_pages(transport_name: str = "grpc"): log_entry.LogEntry(), log_entry.LogEntry(), ], - next_page_token='abc', + next_page_token="abc", ), logging.ListLogEntriesResponse( entries=[], - next_page_token='def', + next_page_token="def", ), logging.ListLogEntriesResponse( entries=[ log_entry.LogEntry(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging.ListLogEntriesResponse( entries=[ @@ -1887,9 +2304,10 @@ def test_list_log_entries_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.list_log_entries(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_list_log_entries_async_pager(): client = LoggingServiceV2AsyncClient( @@ -1898,8 +2316,8 @@ async def test_list_log_entries_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_log_entries), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_log_entries), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging.ListLogEntriesResponse( @@ -1908,17 +2326,17 @@ async def test_list_log_entries_async_pager(): log_entry.LogEntry(), log_entry.LogEntry(), ], - next_page_token='abc', + next_page_token="abc", ), logging.ListLogEntriesResponse( entries=[], - next_page_token='def', + next_page_token="def", ), logging.ListLogEntriesResponse( entries=[ log_entry.LogEntry(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging.ListLogEntriesResponse( entries=[ @@ -1928,15 +2346,16 @@ async def test_list_log_entries_async_pager(): ), RuntimeError, ) - async_pager = await client.list_log_entries(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.list_log_entries( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, log_entry.LogEntry) - for i in responses) + assert all(isinstance(i, log_entry.LogEntry) for i in responses) @pytest.mark.asyncio @@ -1947,8 +2366,8 @@ async def test_list_log_entries_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_log_entries), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_log_entries), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging.ListLogEntriesResponse( @@ -1957,17 +2376,17 @@ async def test_list_log_entries_async_pages(): log_entry.LogEntry(), log_entry.LogEntry(), ], - next_page_token='abc', + next_page_token="abc", ), logging.ListLogEntriesResponse( entries=[], - next_page_token='def', + next_page_token="def", ), logging.ListLogEntriesResponse( entries=[ log_entry.LogEntry(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging.ListLogEntriesResponse( entries=[ @@ -1980,18 +2399,22 @@ async def test_list_log_entries_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.list_log_entries(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - logging.ListMonitoredResourceDescriptorsRequest, - dict, -]) -def test_list_monitored_resource_descriptors(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + logging.ListMonitoredResourceDescriptorsRequest, + dict, + ], +) +def test_list_monitored_resource_descriptors(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2003,11 +2426,11 @@ def test_list_monitored_resource_descriptors(request_type, transport: str = 'grp # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: + type(client.transport.list_monitored_resource_descriptors), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = logging.ListMonitoredResourceDescriptorsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.list_monitored_resource_descriptors(request) @@ -2019,7 +2442,7 @@ def test_list_monitored_resource_descriptors(request_type, transport: str = 'grp # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListMonitoredResourceDescriptorsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_list_monitored_resource_descriptors_non_empty_request_with_auto_populated_field(): @@ -2027,28 +2450,31 @@ def test_list_monitored_resource_descriptors_non_empty_request_with_auto_populat # automatically populated, according to AIP-4235, with non-empty requests. client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging.ListMonitoredResourceDescriptorsRequest( - page_token='page_token_value', + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.list_monitored_resource_descriptors), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_monitored_resource_descriptors(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging.ListMonitoredResourceDescriptorsRequest( - page_token='page_token_value', + page_token="page_token_value", ) + def test_list_monitored_resource_descriptors_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2063,12 +2489,19 @@ def test_list_monitored_resource_descriptors_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_monitored_resource_descriptors in client._transport._wrapped_methods + assert ( + client._transport.list_monitored_resource_descriptors + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_monitored_resource_descriptors] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_monitored_resource_descriptors + ] = mock_rpc request = {} client.list_monitored_resource_descriptors(request) @@ -2081,8 +2514,11 @@ def test_list_monitored_resource_descriptors_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_monitored_resource_descriptors_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_monitored_resource_descriptors_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2096,12 +2532,17 @@ async def test_list_monitored_resource_descriptors_async_use_cached_wrapped_rpc( wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_monitored_resource_descriptors in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_monitored_resource_descriptors + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_monitored_resource_descriptors] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_monitored_resource_descriptors + ] = mock_rpc request = {} await client.list_monitored_resource_descriptors(request) @@ -2115,8 +2556,12 @@ async def test_list_monitored_resource_descriptors_async_use_cached_wrapped_rpc( assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_monitored_resource_descriptors_async(transport: str = 'grpc_asyncio', request_type=logging.ListMonitoredResourceDescriptorsRequest): +async def 
test_list_monitored_resource_descriptors_async( + transport: str = "grpc_asyncio", + request_type=logging.ListMonitoredResourceDescriptorsRequest, +): client = LoggingServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2128,12 +2573,14 @@ async def test_list_monitored_resource_descriptors_async(transport: str = 'grpc_ # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: + type(client.transport.list_monitored_resource_descriptors), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging.ListMonitoredResourceDescriptorsResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListMonitoredResourceDescriptorsResponse( + next_page_token="next_page_token_value", + ) + ) response = await client.list_monitored_resource_descriptors(request) # Establish that the underlying gRPC stub method was called. @@ -2144,7 +2591,7 @@ async def test_list_monitored_resource_descriptors_async(transport: str = 'grpc_ # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListMonitoredResourceDescriptorsAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio @@ -2160,8 +2607,8 @@ def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc") # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: + type(client.transport.list_monitored_resource_descriptors), "__call__" + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging.ListMonitoredResourceDescriptorsResponse( @@ -2170,17 +2617,17 @@ def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc") monitored_resource_pb2.MonitoredResourceDescriptor(), monitored_resource_pb2.MonitoredResourceDescriptor(), ], - next_page_token='abc', + next_page_token="abc", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[], - next_page_token='def', + next_page_token="def", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ monitored_resource_pb2.MonitoredResourceDescriptor(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ @@ -2194,7 +2641,9 @@ def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc") expected_metadata = () retry = retries.Retry() timeout = 5 - pager = client.list_monitored_resource_descriptors(request={}, retry=retry, timeout=timeout) + pager = client.list_monitored_resource_descriptors( + request={}, retry=retry, timeout=timeout + ) assert pager._metadata == expected_metadata assert pager._retry == retry @@ -2202,8 +2651,12 @@ def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc") results = list(pager) assert len(results) == 6 - assert all(isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) - for i in results) + assert all( + isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) + for i in results + ) + + def test_list_monitored_resource_descriptors_pages(transport_name: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2212,8 +2665,8 @@ def test_list_monitored_resource_descriptors_pages(transport_name: str = "grpc") # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: + type(client.transport.list_monitored_resource_descriptors), "__call__" + ) as call: # Set the response to a series of pages. call.side_effect = ( logging.ListMonitoredResourceDescriptorsResponse( @@ -2222,17 +2675,17 @@ def test_list_monitored_resource_descriptors_pages(transport_name: str = "grpc") monitored_resource_pb2.MonitoredResourceDescriptor(), monitored_resource_pb2.MonitoredResourceDescriptor(), ], - next_page_token='abc', + next_page_token="abc", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[], - next_page_token='def', + next_page_token="def", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ monitored_resource_pb2.MonitoredResourceDescriptor(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ @@ -2243,9 +2696,10 @@ def test_list_monitored_resource_descriptors_pages(transport_name: str = "grpc") RuntimeError, ) pages = list(client.list_monitored_resource_descriptors(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async_pager(): client = LoggingServiceV2AsyncClient( @@ -2254,8 +2708,10 @@ async def test_list_monitored_resource_descriptors_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_monitored_resource_descriptors), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging.ListMonitoredResourceDescriptorsResponse( @@ -2264,17 +2720,17 @@ async def test_list_monitored_resource_descriptors_async_pager(): monitored_resource_pb2.MonitoredResourceDescriptor(), monitored_resource_pb2.MonitoredResourceDescriptor(), ], - next_page_token='abc', + next_page_token="abc", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[], - next_page_token='def', + next_page_token="def", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ monitored_resource_pb2.MonitoredResourceDescriptor(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ @@ -2284,15 +2740,19 @@ async def test_list_monitored_resource_descriptors_async_pager(): ), RuntimeError, ) - async_pager = await client.list_monitored_resource_descriptors(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.list_monitored_resource_descriptors( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) - for i in responses) + assert all( + isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) + for i in responses + ) @pytest.mark.asyncio @@ -2303,8 +2763,10 @@ async def test_list_monitored_resource_descriptors_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_monitored_resource_descriptors), + "__call__", + new_callable=mock.AsyncMock, + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging.ListMonitoredResourceDescriptorsResponse( @@ -2313,17 +2775,17 @@ async def test_list_monitored_resource_descriptors_async_pages(): monitored_resource_pb2.MonitoredResourceDescriptor(), monitored_resource_pb2.MonitoredResourceDescriptor(), ], - next_page_token='abc', + next_page_token="abc", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[], - next_page_token='def', + next_page_token="def", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ monitored_resource_pb2.MonitoredResourceDescriptor(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ @@ -2336,18 +2798,22 @@ async def test_list_monitored_resource_descriptors_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.list_monitored_resource_descriptors(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - logging.ListLogsRequest, - dict, -]) -def test_list_logs(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + logging.ListLogsRequest, + dict, + ], +) +def test_list_logs(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2358,13 +2824,11 @@ def test_list_logs(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging.ListLogsResponse( - log_names=['log_names_value'], - next_page_token='next_page_token_value', + log_names=["log_names_value"], + next_page_token="next_page_token_value", ) response = client.list_logs(request) @@ -2376,8 +2840,8 @@ def test_list_logs(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogsPager) - assert response.log_names == ['log_names_value'] - assert response.next_page_token == 'next_page_token_value' + assert response.log_names == ["log_names_value"] + assert response.next_page_token == "next_page_token_value" def test_list_logs_non_empty_request_with_auto_populated_field(): @@ -2385,30 +2849,31 @@ def test_list_logs_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging.ListLogsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_logs(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging.ListLogsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) + def test_list_logs_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2427,7 +2892,9 @@ def test_list_logs_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.list_logs] = mock_rpc request = {} client.list_logs(request) @@ -2441,6 +2908,7 @@ def test_list_logs_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_list_logs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2456,12 +2924,17 @@ async def test_list_logs_async_use_cached_wrapped_rpc(transport: str = "grpc_asy wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_logs in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_logs + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_logs] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_logs + ] = mock_rpc request = {} await client.list_logs(request) @@ -2475,8 +2948,11 @@ async def test_list_logs_async_use_cached_wrapped_rpc(transport: str = 
"grpc_asy assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_logs_async(transport: str = 'grpc_asyncio', request_type=logging.ListLogsRequest): +async def test_list_logs_async( + transport: str = "grpc_asyncio", request_type=logging.ListLogsRequest +): client = LoggingServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2487,14 +2963,14 @@ async def test_list_logs_async(transport: str = 'grpc_asyncio', request_type=log request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogsResponse( - log_names=['log_names_value'], - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListLogsResponse( + log_names=["log_names_value"], + next_page_token="next_page_token_value", + ) + ) response = await client.list_logs(request) # Establish that the underlying gRPC stub method was called. @@ -2505,14 +2981,15 @@ async def test_list_logs_async(transport: str = 'grpc_asyncio', request_type=log # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListLogsAsyncPager) - assert response.log_names == ['log_names_value'] - assert response.next_page_token == 'next_page_token_value' + assert response.log_names == ["log_names_value"] + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test_list_logs_async_from_dict(): await test_list_logs_async(request_type=dict) + def test_list_logs_field_headers(): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2522,12 +2999,10 @@ def test_list_logs_field_headers(): # a field header. Set these to a non-empty value. request = logging.ListLogsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: call.return_value = logging.ListLogsResponse() client.list_logs(request) @@ -2539,9 +3014,9 @@ def test_list_logs_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2554,13 +3029,13 @@ async def test_list_logs_field_headers_async(): # a field header. Set these to a non-empty value. request = logging.ListLogsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogsResponse()) + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListLogsResponse() + ) await client.list_logs(request) # Establish that the underlying gRPC stub method was called. @@ -2571,9 +3046,9 @@ async def test_list_logs_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_list_logs_flattened(): @@ -2582,15 +3057,13 @@ def test_list_logs_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging.ListLogsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.list_logs( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -2598,7 +3071,7 @@ def test_list_logs_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -2612,9 +3085,10 @@ def test_list_logs_flattened_error(): with pytest.raises(ValueError): client.list_logs( logging.ListLogsRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test_list_logs_flattened_async(): client = LoggingServiceV2AsyncClient( @@ -2622,17 +3096,17 @@ async def test_list_logs_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging.ListLogsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogsResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListLogsResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.list_logs( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -2640,9 +3114,10 @@ async def test_list_logs_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test_list_logs_flattened_error_async(): client = LoggingServiceV2AsyncClient( @@ -2654,7 +3129,7 @@ async def test_list_logs_flattened_error_async(): with pytest.raises(ValueError): await client.list_logs( logging.ListLogsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -2665,9 +3140,7 @@ def test_list_logs_pager(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( logging.ListLogsResponse( @@ -2676,17 +3149,17 @@ def test_list_logs_pager(transport_name: str = "grpc"): str(), str(), ], - next_page_token='abc', + next_page_token="abc", ), logging.ListLogsResponse( log_names=[], - next_page_token='def', + next_page_token="def", ), logging.ListLogsResponse( log_names=[ str(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging.ListLogsResponse( log_names=[ @@ -2701,9 +3174,7 @@ def test_list_logs_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_logs(request={}, retry=retry, timeout=timeout) @@ -2713,8 +3184,9 @@ def test_list_logs_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, str) - for i in results) + assert all(isinstance(i, str) for i in results) + + def test_list_logs_pages(transport_name: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2722,9 +3194,7 @@ def test_list_logs_pages(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( logging.ListLogsResponse( @@ -2733,17 +3203,17 @@ def test_list_logs_pages(transport_name: str = "grpc"): str(), str(), ], - next_page_token='abc', + next_page_token="abc", ), logging.ListLogsResponse( log_names=[], - next_page_token='def', + next_page_token="def", ), logging.ListLogsResponse( log_names=[ str(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging.ListLogsResponse( log_names=[ @@ -2754,9 +3224,10 @@ def test_list_logs_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.list_logs(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_list_logs_async_pager(): client = LoggingServiceV2AsyncClient( @@ -2765,8 +3236,8 @@ async def test_list_logs_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_logs), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_logs), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging.ListLogsResponse( @@ -2775,17 +3246,17 @@ async def test_list_logs_async_pager(): str(), str(), ], - next_page_token='abc', + next_page_token="abc", ), logging.ListLogsResponse( log_names=[], - next_page_token='def', + next_page_token="def", ), logging.ListLogsResponse( log_names=[ str(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging.ListLogsResponse( log_names=[ @@ -2795,15 +3266,16 @@ async def test_list_logs_async_pager(): ), RuntimeError, ) - async_pager = await client.list_logs(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.list_logs( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, str) - for i in responses) + assert all(isinstance(i, str) for i in responses) @pytest.mark.asyncio @@ -2814,8 +3286,8 @@ async def test_list_logs_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_logs), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_logs), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging.ListLogsResponse( @@ -2824,17 +3296,17 @@ async def test_list_logs_async_pages(): str(), str(), ], - next_page_token='abc', + next_page_token="abc", ), logging.ListLogsResponse( log_names=[], - next_page_token='def', + next_page_token="def", ), logging.ListLogsResponse( log_names=[ str(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging.ListLogsResponse( log_names=[ @@ -2847,18 +3319,22 @@ async def test_list_logs_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.list_logs(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - logging.TailLogEntriesRequest, - dict, -]) -def test_tail_log_entries(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + logging.TailLogEntriesRequest, + dict, + ], +) +def test_tail_log_entries(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2870,9 +3346,7 @@ def test_tail_log_entries(request_type, transport: str = 'grpc'): requests = [request] # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.tail_log_entries), - '__call__') as call: + with mock.patch.object(type(client.transport.tail_log_entries), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = iter([logging.TailLogEntriesResponse()]) response = client.tail_log_entries(iter(requests)) @@ -2905,8 +3379,12 @@ def test_tail_log_entries_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.tail_log_entries] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.tail_log_entries] = ( + mock_rpc + ) request = [{}] client.tail_log_entries(request) @@ -2919,8 +3397,11 @@ def test_tail_log_entries_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_tail_log_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_tail_log_entries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2934,12 +3415,17 @@ async def test_tail_log_entries_async_use_cached_wrapped_rpc(transport: str = "g wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.tail_log_entries in client._client._transport._wrapped_methods + assert ( + client._client._transport.tail_log_entries + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.tail_log_entries] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.tail_log_entries + ] = mock_rpc request = [{}] await client.tail_log_entries(request) @@ 
-2953,8 +3439,11 @@ async def test_tail_log_entries_async_use_cached_wrapped_rpc(transport: str = "g assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_tail_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.TailLogEntriesRequest): +async def test_tail_log_entries_async( + transport: str = "grpc_asyncio", request_type=logging.TailLogEntriesRequest +): client = LoggingServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2966,12 +3455,12 @@ async def test_tail_log_entries_async(transport: str = 'grpc_asyncio', request_t requests = [request] # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.tail_log_entries), - '__call__') as call: + with mock.patch.object(type(client.transport.tail_log_entries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[logging.TailLogEntriesResponse()]) + call.return_value.read = mock.AsyncMock( + side_effect=[logging.TailLogEntriesResponse()] + ) response = await client.tail_log_entries(iter(requests)) # Establish that the underlying gRPC stub method was called. @@ -3027,8 +3516,7 @@ def test_credentials_transport_error(): options.api_key = "api_key" with pytest.raises(ValueError): client = LoggingServiceV2Client( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() + client_options=options, credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. @@ -3050,6 +3538,7 @@ def test_transport_instance(): client = LoggingServiceV2Client(transport=transport) assert client.transport is transport + def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.LoggingServiceV2GrpcTransport( @@ -3064,17 +3553,22 @@ def test_transport_get_channel(): channel = transport.grpc_channel assert channel -@pytest.mark.parametrize("transport_class", [ - transports.LoggingServiceV2GrpcTransport, - transports.LoggingServiceV2GrpcAsyncIOTransport, -]) + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LoggingServiceV2GrpcTransport, + transports.LoggingServiceV2GrpcAsyncIOTransport, + ], +) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() + def test_transport_kind_grpc(): transport = LoggingServiceV2Client.get_transport_class("grpc")( credentials=ga_credentials.AnonymousCredentials() @@ -3084,8 +3578,7 @@ def test_transport_kind_grpc(): def test_initialize_client_w_grpc(): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" ) assert client is not None @@ -3099,9 +3592,7 @@ def test_delete_log_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_log), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_log), "__call__") as call: call.return_value = None client.delete_log(request=None) @@ -3123,8 +3614,8 @@ def test_write_log_entries_empty_call_grpc(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.write_log_entries), - '__call__') as call: + type(client.transport.write_log_entries), "__call__" + ) as call: call.return_value = logging.WriteLogEntriesResponse() client.write_log_entries(request=None) @@ -3145,9 +3636,7 @@ def test_list_log_entries_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: call.return_value = logging.ListLogEntriesResponse() client.list_log_entries(request=None) @@ -3169,8 +3658,8 @@ def test_list_monitored_resource_descriptors_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: + type(client.transport.list_monitored_resource_descriptors), "__call__" + ) as call: call.return_value = logging.ListMonitoredResourceDescriptorsResponse() client.list_monitored_resource_descriptors(request=None) @@ -3191,9 +3680,7 @@ def test_list_logs_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: call.return_value = logging.ListLogsResponse() client.list_logs(request=None) @@ -3214,8 +3701,7 @@ def test_transport_kind_grpc_asyncio(): def test_initialize_client_w_grpc_asyncio(): client = LoggingServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" + credentials=async_anonymous_credentials(), transport="grpc_asyncio" ) assert client is not None @@ -3230,9 +3716,7 @@ async def test_delete_log_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_log), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_log), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_log(request=None) @@ -3256,11 +3740,12 @@ async def test_write_log_entries_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.write_log_entries), - '__call__') as call: + type(client.transport.write_log_entries), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.WriteLogEntriesResponse( - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.WriteLogEntriesResponse() + ) await client.write_log_entries(request=None) # Establish that the underlying stub method was called. @@ -3281,13 +3766,13 @@ async def test_list_log_entries_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogEntriesResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListLogEntriesResponse( + next_page_token="next_page_token_value", + ) + ) await client.list_log_entries(request=None) # Establish that the underlying stub method was called. @@ -3309,12 +3794,14 @@ async def test_list_monitored_resource_descriptors_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: + type(client.transport.list_monitored_resource_descriptors), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListMonitoredResourceDescriptorsResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListMonitoredResourceDescriptorsResponse( + next_page_token="next_page_token_value", + ) + ) await client.list_monitored_resource_descriptors(request=None) # Establish that the underlying stub method was called. @@ -3335,14 +3822,14 @@ async def test_list_logs_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: + with mock.patch.object(type(client.transport.list_logs), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogsResponse( - log_names=['log_names_value'], - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging.ListLogsResponse( + log_names=["log_names_value"], + next_page_token="next_page_token_value", + ) + ) await client.list_logs(request=None) # Establish that the underlying stub method was called. 
@@ -3363,18 +3850,21 @@ def test_transport_grpc_default(): transports.LoggingServiceV2GrpcTransport, ) + def test_logging_service_v2_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.LoggingServiceV2Transport( credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" + credentials_file="credentials.json", ) def test_logging_service_v2_base_transport(): # Instantiate the base transport. - with mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport.__init__') as Transport: + with mock.patch( + "google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport.__init__" + ) as Transport: Transport.return_value = None transport = transports.LoggingServiceV2Transport( credentials=ga_credentials.AnonymousCredentials(), @@ -3383,15 +3873,15 @@ def test_logging_service_v2_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. 
methods = ( - 'delete_log', - 'write_log_entries', - 'list_log_entries', - 'list_monitored_resource_descriptors', - 'list_logs', - 'tail_log_entries', - 'get_operation', - 'cancel_operation', - 'list_operations', + "delete_log", + "write_log_entries", + "list_log_entries", + "list_monitored_resource_descriptors", + "list_logs", + "tail_log_entries", + "get_operation", + "cancel_operation", + "list_operations", ) for method in methods: with pytest.raises(NotImplementedError): @@ -3402,7 +3892,7 @@ def test_logging_service_v2_base_transport(): # Catch all for all remaining methods and properties remainder = [ - 'kind', + "kind", ] for r in remainder: with pytest.raises(NotImplementedError): @@ -3411,29 +3901,42 @@ def test_logging_service_v2_base_transport(): def test_logging_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages') as Transport: + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch( + "google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages" + ) as Transport, + ): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.LoggingServiceV2Transport( credentials_file="credentials.json", quota_project_id="octopus", ) - load_creds.assert_called_once_with("credentials.json", + load_creds.assert_called_once_with( + "credentials.json", scopes=None, default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 
'https://www.googleapis.com/auth/logging.write', -), + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), quota_project_id="octopus", ) def test_logging_service_v2_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages') as Transport: + with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch( + "google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages" + ) as Transport, + ): Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.LoggingServiceV2Transport() @@ -3442,18 +3945,18 @@ def test_logging_service_v2_base_transport_with_adc(): def test_logging_service_v2_auth_adc(): # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) LoggingServiceV2Client() adc.assert_called_once_with( scopes=None, default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', -), + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), quota_project_id=None, ) @@ -3468,12 +3971,18 @@ def test_logging_service_v2_auth_adc(): def test_logging_service_v2_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read', 'https://www.googleapis.com/auth/logging.write',), + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), quota_project_id="octopus", ) @@ -3486,39 +3995,39 @@ def test_logging_service_v2_transport_auth_adc(transport_class): ], ) def test_logging_service_v2_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) adc.return_value = (gdch_mock, None) transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) + gdch_mock.with_gdch_audience.assert_called_once_with(e) 
@pytest.mark.parametrize( "transport_class,grpc_helpers", [ (transports.LoggingServiceV2GrpcTransport, grpc_helpers), - (transports.LoggingServiceV2GrpcAsyncIOTransport, grpc_helpers_async) + (transports.LoggingServiceV2GrpcAsyncIOTransport, grpc_helpers_async), ], ) def test_logging_service_v2_transport_create_channel(transport_class, grpc_helpers): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: + with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel, + ): creds = ga_credentials.AnonymousCredentials() adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) create_channel.assert_called_with( "logging.googleapis.com:443", @@ -3526,12 +4035,12 @@ def test_logging_service_v2_transport_create_channel(transport_class, grpc_helpe credentials_file=None, quota_project_id="octopus", default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', -), + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), scopes=["1", "2"], default_host="logging.googleapis.com", ssl_credentials=None, @@ -3542,10 +4051,14 @@ def test_logging_service_v2_transport_create_channel(transport_class, grpc_helpe ) 
-@pytest.mark.parametrize("transport_class", [transports.LoggingServiceV2GrpcTransport, transports.LoggingServiceV2GrpcAsyncIOTransport]) -def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls( - transport_class -): +@pytest.mark.parametrize( + "transport_class", + [ + transports.LoggingServiceV2GrpcTransport, + transports.LoggingServiceV2GrpcAsyncIOTransport, + ], +) +def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls(transport_class): cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. @@ -3554,7 +4067,7 @@ def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls( transport_class( host="squid.clam.whelk", credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds + ssl_channel_credentials=mock_ssl_channel_creds, ) mock_create_channel.assert_called_once_with( "squid.clam.whelk:443", @@ -3575,45 +4088,52 @@ def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls( with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: transport_class( credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback + client_cert_source_for_mtls=client_cert_source_callback, ) expected_cert, expected_key = client_cert_source_callback() mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key + certificate_chain=expected_cert, private_key=expected_key ) -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) def test_logging_service_v2_host_no_port(transport_name): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'logging.googleapis.com:443' + client_options=client_options.ClientOptions( + 
api_endpoint="logging.googleapis.com" + ), + transport=transport_name, ) + assert client.transport._host == ("logging.googleapis.com:443") + -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) def test_logging_service_v2_host_with_port(transport_name): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com:8000'), + client_options=client_options.ClientOptions( + api_endpoint="logging.googleapis.com:8000" + ), transport=transport_name, ) - assert client.transport._host == ( - 'logging.googleapis.com:8000' - ) + assert client.transport._host == ("logging.googleapis.com:8000") + def test_logging_service_v2_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.LoggingServiceV2GrpcTransport( @@ -3626,7 +4146,7 @@ def test_logging_service_v2_grpc_transport_channel(): def test_logging_service_v2_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.LoggingServiceV2GrpcAsyncIOTransport( @@ -3641,12 +4161,22 @@ def test_logging_service_v2_grpc_asyncio_transport_channel(): # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.filterwarnings("ignore::FutureWarning") -@pytest.mark.parametrize("transport_class", [transports.LoggingServiceV2GrpcTransport, transports.LoggingServiceV2GrpcAsyncIOTransport]) +@pytest.mark.parametrize( + "transport_class", + [ + transports.LoggingServiceV2GrpcTransport, + transports.LoggingServiceV2GrpcAsyncIOTransport, + ], +) def test_logging_service_v2_transport_channel_mtls_with_client_cert_source( - transport_class + transport_class, ): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -3655,7 +4185,7 @@ def test_logging_service_v2_transport_channel_mtls_with_client_cert_source( cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -3685,17 +4215,23 @@ def test_logging_service_v2_transport_channel_mtls_with_client_cert_source( # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.LoggingServiceV2GrpcTransport, transports.LoggingServiceV2GrpcAsyncIOTransport]) -def test_logging_service_v2_transport_channel_mtls_with_adc( - transport_class -): +@pytest.mark.parametrize( + "transport_class", + [ + transports.LoggingServiceV2GrpcTransport, + transports.LoggingServiceV2GrpcAsyncIOTransport, + ], +) +def test_logging_service_v2_transport_channel_mtls_with_adc(transport_class): mock_ssl_cred = mock.Mock() with mock.patch.multiple( "google.auth.transport.grpc.SslCredentials", __init__=mock.Mock(return_value=None), ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel mock_cred = mock.Mock() @@ -3726,7 +4262,10 @@ def test_logging_service_v2_transport_channel_mtls_with_adc( def test_log_path(): project = "squid" log = "clam" - expected = "projects/{project}/logs/{log}".format(project=project, log=log, ) + expected = "projects/{project}/logs/{log}".format( + project=project, + log=log, + ) actual = LoggingServiceV2Client.log_path(project, log) assert expected == actual @@ -3742,9 +4281,12 @@ def test_parse_log_path(): actual = LoggingServiceV2Client.parse_log_path(path) assert expected == actual + def test_common_billing_account_path(): billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) actual = LoggingServiceV2Client.common_billing_account_path(billing_account) assert expected == actual @@ -3759,9 +4301,12 @@ def test_parse_common_billing_account_path(): actual = LoggingServiceV2Client.parse_common_billing_account_path(path) assert expected == actual + def 
test_common_folder_path(): folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) + expected = "folders/{folder}".format( + folder=folder, + ) actual = LoggingServiceV2Client.common_folder_path(folder) assert expected == actual @@ -3776,9 +4321,12 @@ def test_parse_common_folder_path(): actual = LoggingServiceV2Client.parse_common_folder_path(path) assert expected == actual + def test_common_organization_path(): organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = LoggingServiceV2Client.common_organization_path(organization) assert expected == actual @@ -3793,9 +4341,12 @@ def test_parse_common_organization_path(): actual = LoggingServiceV2Client.parse_common_organization_path(path) assert expected == actual + def test_common_project_path(): project = "scallop" - expected = "projects/{project}".format(project=project, ) + expected = "projects/{project}".format( + project=project, + ) actual = LoggingServiceV2Client.common_project_path(project) assert expected == actual @@ -3810,10 +4361,14 @@ def test_parse_common_project_path(): actual = LoggingServiceV2Client.parse_common_project_path(path) assert expected == actual + def test_common_location_path(): project = "squid" location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) actual = LoggingServiceV2Client.common_location_path(project, location) assert expected == actual @@ -3833,14 +4388,18 @@ def test_parse_common_location_path(): def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() - with mock.patch.object(transports.LoggingServiceV2Transport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.LoggingServiceV2Transport, 
"_prep_wrapped_messages" + ) as prep: client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) - with mock.patch.object(transports.LoggingServiceV2Transport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.LoggingServiceV2Transport, "_prep_wrapped_messages" + ) as prep: transport_class = LoggingServiceV2Client.get_transport_class() transport = transport_class( credentials=ga_credentials.AnonymousCredentials(), @@ -3851,7 +4410,8 @@ def test_client_with_default_client_info(): def test_cancel_operation(transport: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3871,10 +4431,12 @@ def test_cancel_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. assert response is None + @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = LoggingServiceV2AsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3884,9 +4446,7 @@ async def test_cancel_operation_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -3909,7 +4469,7 @@ def test_cancel_operation_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None + call.return_value = None client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. @@ -3919,7 +4479,11 @@ def test_cancel_operation_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): @@ -3934,9 +4498,7 @@ async def test_cancel_operation_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3945,7 +4507,10 @@ async def test_cancel_operation_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_cancel_operation_from_dict(): @@ -3964,6 +4529,7 @@ def test_cancel_operation_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = LoggingServiceV2AsyncClient( @@ -3972,9 +4538,7 @@ async def test_cancel_operation_from_dict_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.cancel_operation( request={ "name": "locations", @@ -3998,6 +4562,7 @@ def test_cancel_operation_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.CancelOperationRequest() + @pytest.mark.asyncio async def test_cancel_operation_flattened_async(): client = LoggingServiceV2AsyncClient( @@ -4006,9 +4571,7 @@ async def test_cancel_operation_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.cancel_operation() # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4018,7 +4581,8 @@ async def test_cancel_operation_flattened_async(): def test_get_operation(transport: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4038,10 +4602,12 @@ def test_get_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. 
assert isinstance(response, operations_pb2.Operation) + @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = LoggingServiceV2AsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4086,7 +4652,11 @@ def test_get_operation_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_get_operation_field_headers_async(): @@ -4112,7 +4682,10 @@ async def test_get_operation_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_get_operation_from_dict(): @@ -4131,6 +4704,7 @@ def test_get_operation_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = LoggingServiceV2AsyncClient( @@ -4165,6 +4739,7 @@ def test_get_operation_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.GetOperationRequest() + @pytest.mark.asyncio async def test_get_operation_flattened_async(): client = LoggingServiceV2AsyncClient( @@ -4185,7 +4760,8 @@ async def test_get_operation_flattened_async(): def test_list_operations(transport: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4205,10 +4781,12 @@ def test_list_operations(transport: str = "grpc"): # Establish 
that the response is the type that we expect. assert isinstance(response, operations_pb2.ListOperationsResponse) + @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = LoggingServiceV2AsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4253,7 +4831,11 @@ def test_list_operations_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_list_operations_field_headers_async(): @@ -4279,7 +4861,10 @@ async def test_list_operations_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_list_operations_from_dict(): @@ -4298,6 +4883,7 @@ def test_list_operations_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = LoggingServiceV2AsyncClient( @@ -4332,6 +4918,7 @@ def test_list_operations_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.ListOperationsRequest() + @pytest.mark.asyncio async def test_list_operations_flattened_async(): client = LoggingServiceV2AsyncClient( @@ -4352,10 +4939,11 @@ async def test_list_operations_flattened_async(): def test_transport_close_grpc(): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with 
mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: with client: close.assert_not_called() close.assert_called_once() @@ -4364,10 +4952,11 @@ def test_transport_close_grpc(): @pytest.mark.asyncio async def test_transport_close_grpc_asyncio(): client = LoggingServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" + credentials=async_anonymous_credentials(), transport="grpc_asyncio" ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: async with client: close.assert_not_called() close.assert_called_once() @@ -4375,12 +4964,11 @@ async def test_transport_close_grpc_asyncio(): def test_client_ctx(): transports = [ - 'grpc', + "grpc", ] for transport in transports: client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. 
with mock.patch.object(type(client.transport), "close") as close: @@ -4389,10 +4977,14 @@ def test_client_ctx(): pass close.assert_called() -@pytest.mark.parametrize("client_class,transport_class", [ - (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport), - (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport), -]) + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport), + (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport), + ], +) def test_api_key_credentials(client_class, transport_class): with mock.patch.object( google.auth._default, "get_api_key_credentials", create=True @@ -4407,7 +4999,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index e4c6d2122fec..131ef34ece82 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -14,6 +14,7 @@ # limitations under the License. 
# import os + # try/except added for compatibility with python < 3.8 try: from unittest import mock @@ -21,38 +22,24 @@ except ImportError: # pragma: NO COVER import mock -import grpc -from grpc.experimental import aio import json import math +from collections.abc import Mapping, Sequence + +import grpc import pytest -from collections.abc import Sequence, Mapping from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule +from grpc.experimental import aio from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule try: from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER +except ImportError: # pragma: NO COVER HAS_GOOGLE_AUTH_AIO = False -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.logging_v2.services.metrics_service_v2 import BaseMetricsServiceV2AsyncClient -from google.cloud.logging_v2.services.metrics_service_v2 import BaseMetricsServiceV2Client -from google.cloud.logging_v2.services.metrics_service_v2 import pagers -from google.cloud.logging_v2.services.metrics_service_v2 import transports -from google.cloud.logging_v2.types import logging_metrics -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account import google.api.distribution_pb2 as distribution_pb2 # type: ignore import google.api.label_pb2 as label_pb2 # type: ignore import google.api.launch_stage_pb2 as launch_stage_pb2 # type: ignore @@ -60,8 +47,26 @@ import google.auth 
import google.protobuf.duration_pb2 as duration_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore - - +from google.api_core import ( + client_options, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + path_template, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.logging_v2.services.metrics_service_v2 import ( + BaseMetricsServiceV2AsyncClient, + BaseMetricsServiceV2Client, + pagers, + transports, +) +from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account CRED_INFO_JSON = { "credential_source": "/path/to/file", @@ -76,9 +81,11 @@ async def mock_async_gen(data, chunk_size=1): chunk = data[i : i + chunk_size] yield chunk.encode("utf-8") + def client_cert_source_callback(): return b"cert bytes", b"key bytes" + # TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. # See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. def async_anonymous_credentials(): @@ -86,17 +93,27 @@ def async_anonymous_credentials(): return ga_credentials_async.AnonymousCredentials() return ga_credentials.AnonymousCredentials() + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + # If default endpoint template is localhost, then default mtls endpoint will be the same. 
# This method modifies the default endpoint template so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) def test__get_default_mtls_endpoint(): @@ -108,21 +125,52 @@ def test__get_default_mtls_endpoint(): custom_endpoint = ".custom" assert BaseMetricsServiceV2Client._get_default_mtls_endpoint(None) is None - assert BaseMetricsServiceV2Client._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert BaseMetricsServiceV2Client._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert BaseMetricsServiceV2Client._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert BaseMetricsServiceV2Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert BaseMetricsServiceV2Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi - assert BaseMetricsServiceV2Client._get_default_mtls_endpoint(custom_endpoint) == custom_endpoint + assert ( + BaseMetricsServiceV2Client._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + BaseMetricsServiceV2Client._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + BaseMetricsServiceV2Client._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + BaseMetricsServiceV2Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + BaseMetricsServiceV2Client._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + assert ( + BaseMetricsServiceV2Client._get_default_mtls_endpoint(custom_endpoint) + == custom_endpoint + ) + def test__read_environment_variables(): - assert 
BaseMetricsServiceV2Client._read_environment_variables() == (False, "auto", None) + assert BaseMetricsServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert BaseMetricsServiceV2Client._read_environment_variables() == (True, "auto", None) + assert BaseMetricsServiceV2Client._read_environment_variables() == ( + True, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert BaseMetricsServiceV2Client._read_environment_variables() == (False, "auto", None) + assert BaseMetricsServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} @@ -136,27 +184,46 @@ def test__read_environment_variables(): ) else: assert BaseMetricsServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert BaseMetricsServiceV2Client._read_environment_variables() == ( False, - "auto", + "never", None, ) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert BaseMetricsServiceV2Client._read_environment_variables() == (False, "never", None) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert BaseMetricsServiceV2Client._read_environment_variables() == (False, "always", None) + assert BaseMetricsServiceV2Client._read_environment_variables() == ( + False, + "always", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert BaseMetricsServiceV2Client._read_environment_variables() == (False, "auto", None) + assert BaseMetricsServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with 
pytest.raises(MutualTLSChannelError) as excinfo: BaseMetricsServiceV2Client._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert BaseMetricsServiceV2Client._read_environment_variables() == (False, "auto", "foo.com") + assert BaseMetricsServiceV2Client._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) def test_use_client_cert_effective(): @@ -165,7 +232,9 @@ def test_use_client_cert_effective(): # the google-auth library supports automatic mTLS and determines that a # client certificate should be used. if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=True): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=True + ): assert BaseMetricsServiceV2Client._use_client_cert_effective() is True # Test case 2: Test when `should_use_client_cert` returns False. @@ -173,7 +242,9 @@ def test_use_client_cert_effective(): # the google-auth library supports automatic mTLS and determines that a # client certificate should NOT be used. 
if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=False): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=False + ): assert BaseMetricsServiceV2Client._use_client_cert_effective() is False # Test case 3: Test when `should_use_client_cert` is unavailable and the @@ -185,7 +256,9 @@ def test_use_client_cert_effective(): # Test case 4: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"} + ): assert BaseMetricsServiceV2Client._use_client_cert_effective() is False # Test case 5: Test when `should_use_client_cert` is unavailable and the @@ -197,7 +270,9 @@ def test_use_client_cert_effective(): # Test case 6: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"} + ): assert BaseMetricsServiceV2Client._use_client_cert_effective() is False # Test case 7: Test when `should_use_client_cert` is unavailable and the @@ -209,7 +284,9 @@ def test_use_client_cert_effective(): # Test case 8: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". 
if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"} + ): assert BaseMetricsServiceV2Client._use_client_cert_effective() is False # Test case 9: Test when `should_use_client_cert` is unavailable and the @@ -224,83 +301,177 @@ def test_use_client_cert_effective(): # The method should raise a ValueError as the environment variable must be either # "true" or "false". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): with pytest.raises(ValueError): BaseMetricsServiceV2Client._use_client_cert_effective() # Test case 11: Test when `should_use_client_cert` is available and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. # The method should return False as the environment variable is set to an invalid value. - if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): assert BaseMetricsServiceV2Client._use_client_cert_effective() is False # Test case 12: Test when `should_use_client_cert` is available and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also, # the GOOGLE_API_CONFIG environment variable is unset. 
- if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): assert BaseMetricsServiceV2Client._use_client_cert_effective() is False + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() assert BaseMetricsServiceV2Client._get_client_cert_source(None, False) is None - assert BaseMetricsServiceV2Client._get_client_cert_source(mock_provided_cert_source, False) is None - assert BaseMetricsServiceV2Client._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + assert ( + BaseMetricsServiceV2Client._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + BaseMetricsServiceV2Client._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert BaseMetricsServiceV2Client._get_client_cert_source(None, True) is mock_default_cert_source - assert BaseMetricsServiceV2Client._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + BaseMetricsServiceV2Client._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + BaseMetricsServiceV2Client._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) -@mock.patch.object(BaseMetricsServiceV2Client, 
"_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BaseMetricsServiceV2Client)) -@mock.patch.object(BaseMetricsServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BaseMetricsServiceV2AsyncClient)) + +@mock.patch.object( + BaseMetricsServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BaseMetricsServiceV2Client), +) +@mock.patch.object( + BaseMetricsServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BaseMetricsServiceV2AsyncClient), +) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() default_universe = BaseMetricsServiceV2Client._DEFAULT_UNIVERSE - default_endpoint = BaseMetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = BaseMetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = BaseMetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = BaseMetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) - assert BaseMetricsServiceV2Client._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert BaseMetricsServiceV2Client._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == BaseMetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT - assert BaseMetricsServiceV2Client._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert BaseMetricsServiceV2Client._get_api_endpoint(None, None, default_universe, "always") == BaseMetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT - assert BaseMetricsServiceV2Client._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == BaseMetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT - assert BaseMetricsServiceV2Client._get_api_endpoint(None, None, 
mock_universe, "never") == mock_endpoint - assert BaseMetricsServiceV2Client._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + assert ( + BaseMetricsServiceV2Client._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + BaseMetricsServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == BaseMetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + BaseMetricsServiceV2Client._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + BaseMetricsServiceV2Client._get_api_endpoint( + None, None, default_universe, "always" + ) + == BaseMetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + BaseMetricsServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == BaseMetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT + ) + assert ( + BaseMetricsServiceV2Client._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + BaseMetricsServiceV2Client._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) with pytest.raises(MutualTLSChannelError) as excinfo: - BaseMetricsServiceV2Client._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + BaseMetricsServiceV2Client._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) def test__get_universe_domain(): client_universe_domain = "foo.com" universe_domain_env = "bar.com" - assert BaseMetricsServiceV2Client._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert BaseMetricsServiceV2Client._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert BaseMetricsServiceV2Client._get_universe_domain(None, None) == BaseMetricsServiceV2Client._DEFAULT_UNIVERSE + assert ( + BaseMetricsServiceV2Client._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + BaseMetricsServiceV2Client._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + BaseMetricsServiceV2Client._get_universe_domain(None, None) + == BaseMetricsServiceV2Client._DEFAULT_UNIVERSE + ) with pytest.raises(ValueError) as excinfo: BaseMetricsServiceV2Client._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): cred = mock.Mock(["get_cred_info"]) cred.get_cred_info = mock.Mock(return_value=cred_info_json) @@ -316,7 +487,8 @@ def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_in else: assert error.details == ["foo"] -@pytest.mark.parametrize("error_code", [401,403,404,500]) + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): cred = mock.Mock([]) assert not hasattr(cred, "get_cred_info") @@ -329,59 +501,83 @@ def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): client._add_cred_info_for_auth_errors(error) assert error.details == [] -@pytest.mark.parametrize("client_class,transport_name", [ - (BaseMetricsServiceV2Client, "grpc"), - (BaseMetricsServiceV2AsyncClient, "grpc_asyncio"), -]) -def test_base_metrics_service_v2_client_from_service_account_info(client_class, transport_name): + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (BaseMetricsServiceV2Client, "grpc"), + (BaseMetricsServiceV2AsyncClient, "grpc_asyncio"), + ], +) +def test_base_metrics_service_v2_client_from_service_account_info( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + with 
mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: factory.return_value = creds info = {"valid": True} client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ( - 'logging.googleapis.com:443' - ) + assert client.transport._host == ("logging.googleapis.com:443") -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.MetricsServiceV2GrpcTransport, "grpc"), - (transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_base_metrics_service_v2_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.MetricsServiceV2GrpcTransport, "grpc"), + (transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), + ], +) +def test_base_metrics_service_v2_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=False) use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class,transport_name", [ - (BaseMetricsServiceV2Client, "grpc"), - (BaseMetricsServiceV2AsyncClient, "grpc_asyncio"), -]) -def 
test_base_metrics_service_v2_client_from_service_account_file(client_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (BaseMetricsServiceV2Client, "grpc"), + (BaseMetricsServiceV2AsyncClient, "grpc_asyncio"), + ], +) +def test_base_metrics_service_v2_client_from_service_account_file( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ( - 'logging.googleapis.com:443' - ) + assert client.transport._host == ("logging.googleapis.com:443") def test_base_metrics_service_v2_client_get_transport_class(): @@ -395,29 +591,44 @@ def test_base_metrics_service_v2_client_get_transport_class(): assert transport == transports.MetricsServiceV2GrpcTransport -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (BaseMetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), - (BaseMetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(BaseMetricsServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BaseMetricsServiceV2Client)) 
-@mock.patch.object(BaseMetricsServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BaseMetricsServiceV2AsyncClient)) -def test_base_metrics_service_v2_client_client_options(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (BaseMetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), + ( + BaseMetricsServiceV2AsyncClient, + transports.MetricsServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + BaseMetricsServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BaseMetricsServiceV2Client), +) +@mock.patch.object( + BaseMetricsServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BaseMetricsServiceV2AsyncClient), +) +def test_base_metrics_service_v2_client_client_options( + client_class, transport_class, transport_name +): # Check that if channel is provided we won't create a new one. - with mock.patch.object(BaseMetricsServiceV2Client, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) + with mock.patch.object(BaseMetricsServiceV2Client, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. - with mock.patch.object(BaseMetricsServiceV2Client, 'get_transport_class') as gtc: + with mock.patch.object(BaseMetricsServiceV2Client, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() # Check the case api_endpoint is provided. 
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( @@ -435,13 +646,15 @@ def test_base_metrics_service_v2_client_client_options(client_class, transport_c # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -453,7 +666,7 @@ def test_base_metrics_service_v2_client_client_options(client_class, transport_c # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( @@ -473,17 +686,22 @@ def test_base_metrics_service_v2_client_client_options(client_class, transport_c with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -492,46 +710,90 @@ def test_base_metrics_service_v2_client_client_options(client_class, transport_c api_audience=None, ) # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, 
"__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, - api_audience="https://language.googleapis.com" + api_audience="https://language.googleapis.com", ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (BaseMetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc", "true"), - (BaseMetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "true"), - (BaseMetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc", "false"), - (BaseMetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(BaseMetricsServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BaseMetricsServiceV2Client)) -@mock.patch.object(BaseMetricsServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BaseMetricsServiceV2AsyncClient)) + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + BaseMetricsServiceV2Client, + transports.MetricsServiceV2GrpcTransport, + "grpc", + "true", + ), + ( + BaseMetricsServiceV2AsyncClient, + transports.MetricsServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + BaseMetricsServiceV2Client, + transports.MetricsServiceV2GrpcTransport, + "grpc", + "false", + ), + ( + BaseMetricsServiceV2AsyncClient, + transports.MetricsServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) 
+@mock.patch.object( + BaseMetricsServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BaseMetricsServiceV2Client), +) +@mock.patch.object( + BaseMetricsServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BaseMetricsServiceV2AsyncClient), +) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_base_metrics_service_v2_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): +def test_base_metrics_service_v2_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. # Check the case client_cert_source is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -550,12 +812,22 @@ def 
test_base_metrics_service_v2_client_mtls_env_auto(client_class, transport_cl # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -576,15 +848,22 @@ def test_base_metrics_service_v2_client_mtls_env_auto(client_class, transport_cl ) # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -594,19 +873,31 @@ def test_base_metrics_service_v2_client_mtls_env_auto(client_class, transport_cl ) -@pytest.mark.parametrize("client_class", [ - BaseMetricsServiceV2Client, BaseMetricsServiceV2AsyncClient -]) -@mock.patch.object(BaseMetricsServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(BaseMetricsServiceV2Client)) -@mock.patch.object(BaseMetricsServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BaseMetricsServiceV2AsyncClient)) +@pytest.mark.parametrize( + "client_class", [BaseMetricsServiceV2Client, BaseMetricsServiceV2AsyncClient] +) +@mock.patch.object( + BaseMetricsServiceV2Client, + "DEFAULT_ENDPOINT", + modify_default_endpoint(BaseMetricsServiceV2Client), +) +@mock.patch.object( + BaseMetricsServiceV2AsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(BaseMetricsServiceV2AsyncClient), +) def test_base_metrics_service_v2_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source == mock_client_cert_source @@ -614,18 +905,25 @@ def test_base_metrics_service_v2_client_get_mtls_endpoint_and_cert_source(client with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source is None # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): if hasattr(google.auth.transport.mtls, "should_use_client_cert"): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, ) api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( options @@ -662,23 +960,23 @@ def test_base_metrics_service_v2_client_get_mtls_endpoint_and_cert_source(client env = os.environ.copy() env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) with mock.patch.dict(os.environ, env, clear=True): - config_filename = "mock_certificate_config.json" - config_file_content = json.dumps(config_data) - m = mock.mock_open(read_data=config_file_content) - with mock.patch("builtins.open", m): - with mock.patch.dict( - os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} - ): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, - api_endpoint=mock_api_endpoint, - ) - api_endpoint, cert_source = ( - client_class.get_mtls_endpoint_and_cert_source(options) - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is expected_cert_source + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert 
api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). test_cases = [ @@ -709,23 +1007,23 @@ def test_base_metrics_service_v2_client_get_mtls_endpoint_and_cert_source(client env = os.environ.copy() env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") with mock.patch.dict(os.environ, env, clear=True): - config_filename = "mock_certificate_config.json" - config_file_content = json.dumps(config_data) - m = mock.mock_open(read_data=config_file_content) - with mock.patch("builtins.open", m): - with mock.patch.dict( - os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} - ): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, - api_endpoint=mock_api_endpoint, - ) - api_endpoint, cert_source = ( - client_class.get_mtls_endpoint_and_cert_source(options) - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is expected_cert_source + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): @@ -741,16 +1039,27 @@ def test_base_metrics_service_v2_client_get_mtls_endpoint_and_cert_source(client # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_ENDPOINT assert cert_source is None # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source() + ) assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source @@ -760,27 +1069,50 @@ def test_base_metrics_service_v2_client_get_mtls_endpoint_and_cert_source(client with pytest.raises(MutualTLSChannelError) as excinfo: client_class.get_mtls_endpoint_and_cert_source() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + -@pytest.mark.parametrize("client_class", [ - BaseMetricsServiceV2Client, BaseMetricsServiceV2AsyncClient -]) -@mock.patch.object(BaseMetricsServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", 
modify_default_endpoint_template(BaseMetricsServiceV2Client)) -@mock.patch.object(BaseMetricsServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BaseMetricsServiceV2AsyncClient)) +@pytest.mark.parametrize( + "client_class", [BaseMetricsServiceV2Client, BaseMetricsServiceV2AsyncClient] +) +@mock.patch.object( + BaseMetricsServiceV2Client, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BaseMetricsServiceV2Client), +) +@mock.patch.object( + BaseMetricsServiceV2AsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BaseMetricsServiceV2AsyncClient), +) def test_base_metrics_service_v2_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" default_universe = BaseMetricsServiceV2Client._DEFAULT_UNIVERSE - default_endpoint = BaseMetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = BaseMetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = BaseMetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = BaseMetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", @@ -803,11 +1135,19 @@ def test_base_metrics_service_v2_client_client_api_endpoint(client_class): universe_exists = hasattr(options, "universe_domain") if universe_exists: options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the 
_DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. @@ -815,26 +1155,39 @@ def test_base_metrics_service_v2_client_client_api_endpoint(client_class): if hasattr(options, "universe_domain"): delattr(options, "universe_domain") with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) assert client.api_endpoint == default_endpoint -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (BaseMetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), - (BaseMetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_base_metrics_service_v2_client_client_options_scopes(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (BaseMetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), + ( + BaseMetricsServiceV2AsyncClient, + transports.MetricsServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_base_metrics_service_v2_client_client_options_scopes( + client_class, transport_class, transport_name +): # Check the case scopes are provided. 
options = client_options.ClientOptions( scopes=["1", "2"], ) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -843,23 +1196,39 @@ def test_base_metrics_service_v2_client_client_options_scopes(client_class, tran api_audience=None, ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (BaseMetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc", grpc_helpers), - (BaseMetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_base_metrics_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + BaseMetricsServiceV2Client, + transports.MetricsServiceV2GrpcTransport, + "grpc", + grpc_helpers, + ), + ( + BaseMetricsServiceV2AsyncClient, + transports.MetricsServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_base_metrics_service_v2_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -868,11 +1237,14 @@ def test_base_metrics_service_v2_client_client_options_credentials_file(client_c api_audience=None, ) + def test_base_metrics_service_v2_client_client_options_from_dict(): - with mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2GrpcTransport.__init__') as grpc_transport: + with mock.patch( + "google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2GrpcTransport.__init__" + ) as grpc_transport: grpc_transport.return_value = None client = BaseMetricsServiceV2Client( - client_options={'api_endpoint': 'squid.clam.whelk'} + client_options={"api_endpoint": "squid.clam.whelk"} ) grpc_transport.assert_called_once_with( credentials=None, @@ -887,23 +1259,38 @@ def test_base_metrics_service_v2_client_client_options_from_dict(): ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (BaseMetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc", grpc_helpers), - (BaseMetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_base_metrics_service_v2_client_create_channel_credentials_file(client_class, transport_class, transport_name, 
grpc_helpers): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + BaseMetricsServiceV2Client, + transports.MetricsServiceV2GrpcTransport, + "grpc", + grpc_helpers, + ), + ( + BaseMetricsServiceV2AsyncClient, + transports.MetricsServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_base_metrics_service_v2_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -913,13 +1300,13 @@ def test_base_metrics_service_v2_client_create_channel_credentials_file(client_c ) # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch.object(grpc_helpers, "create_channel") as create_channel, + ): creds = ga_credentials.AnonymousCredentials() file_creds = ga_credentials.AnonymousCredentials() load_creds.return_value = (file_creds, None) @@ -931,12 +1318,12 @@ def test_base_metrics_service_v2_client_create_channel_credentials_file(client_c credentials_file=None, quota_project_id=None, default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', -), + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), scopes=None, default_host="logging.googleapis.com", ssl_credentials=None, @@ -947,11 +1334,14 @@ def test_base_metrics_service_v2_client_create_channel_credentials_file(client_c ) -@pytest.mark.parametrize("request_type", [ - logging_metrics.ListLogMetricsRequest, - dict, -]) -def test__list_log_metrics(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_metrics.ListLogMetricsRequest, + dict, + ], +) +def test__list_log_metrics(request_type, transport: str = "grpc"): client = BaseMetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -962,12 +1352,10 
@@ def test__list_log_metrics(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.ListLogMetricsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client._list_log_metrics(request) @@ -979,7 +1367,7 @@ def test__list_log_metrics(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogMetricsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test__list_log_metrics_non_empty_request_with_auto_populated_field(): @@ -987,30 +1375,31 @@ def test__list_log_metrics_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseMetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_metrics.ListLogMetricsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._list_log_metrics(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.ListLogMetricsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) + def test__list_log_metrics_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1029,8 +1418,12 @@ def test__list_log_metrics_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_log_metrics] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.list_log_metrics] = ( + mock_rpc + ) request = {} client._list_log_metrics(request) @@ -1043,8 +1436,11 @@ def test__list_log_metrics_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__list_log_metrics_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test__list_log_metrics_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1058,12 +1454,17 @@ async def test__list_log_metrics_async_use_cached_wrapped_rpc(transport: str = " wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_log_metrics in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_log_metrics + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_log_metrics] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_log_metrics + ] = mock_rpc request = {} await client._list_log_metrics(request) @@ -1077,8 +1478,11 @@ async def test__list_log_metrics_async_use_cached_wrapped_rpc(transport: str = " assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__list_log_metrics_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.ListLogMetricsRequest): +async def test__list_log_metrics_async( + transport: str = "grpc_asyncio", request_type=logging_metrics.ListLogMetricsRequest +): client = BaseMetricsServiceV2AsyncClient( credentials=async_anonymous_credentials(), 
transport=transport, @@ -1089,13 +1493,13 @@ async def test__list_log_metrics_async(transport: str = 'grpc_asyncio', request_ request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.ListLogMetricsResponse( + next_page_token="next_page_token_value", + ) + ) response = await client._list_log_metrics(request) # Establish that the underlying gRPC stub method was called. @@ -1106,13 +1510,14 @@ async def test__list_log_metrics_async(transport: str = 'grpc_asyncio', request_ # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogMetricsAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio async def test__list_log_metrics_async_from_dict(): await test__list_log_metrics_async(request_type=dict) + def test__list_log_metrics_field_headers(): client = BaseMetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1122,12 +1527,10 @@ def test__list_log_metrics_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.ListLogMetricsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: call.return_value = logging_metrics.ListLogMetricsResponse() client._list_log_metrics(request) @@ -1139,9 +1542,9 @@ def test__list_log_metrics_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1154,13 +1557,13 @@ async def test__list_log_metrics_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_metrics.ListLogMetricsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse()) + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.ListLogMetricsResponse() + ) await client._list_log_metrics(request) # Establish that the underlying gRPC stub method was called. @@ -1171,9 +1574,9 @@ async def test__list_log_metrics_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test__list_log_metrics_flattened(): @@ -1182,15 +1585,13 @@ def test__list_log_metrics_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.ListLogMetricsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client._list_log_metrics( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1198,7 +1599,7 @@ def test__list_log_metrics_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -1212,9 +1613,10 @@ def test__list_log_metrics_flattened_error(): with pytest.raises(ValueError): client._list_log_metrics( logging_metrics.ListLogMetricsRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test__list_log_metrics_flattened_async(): client = BaseMetricsServiceV2AsyncClient( @@ -1222,17 +1624,17 @@ async def test__list_log_metrics_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.ListLogMetricsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.ListLogMetricsResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client._list_log_metrics( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1240,9 +1642,10 @@ async def test__list_log_metrics_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test__list_log_metrics_flattened_error_async(): client = BaseMetricsServiceV2AsyncClient( @@ -1254,7 +1657,7 @@ async def test__list_log_metrics_flattened_error_async(): with pytest.raises(ValueError): await client._list_log_metrics( logging_metrics.ListLogMetricsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -1265,9 +1668,7 @@ def test__list_log_metrics_pager(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( logging_metrics.ListLogMetricsResponse( @@ -1276,17 +1677,17 @@ def test__list_log_metrics_pager(transport_name: str = "grpc"): logging_metrics.LogMetric(), logging_metrics.LogMetric(), ], - next_page_token='abc', + next_page_token="abc", ), logging_metrics.ListLogMetricsResponse( metrics=[], - next_page_token='def', + next_page_token="def", ), logging_metrics.ListLogMetricsResponse( metrics=[ logging_metrics.LogMetric(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_metrics.ListLogMetricsResponse( metrics=[ @@ -1301,9 +1702,7 @@ def test__list_log_metrics_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client._list_log_metrics(request={}, retry=retry, timeout=timeout) @@ -1313,8 +1712,9 @@ def test__list_log_metrics_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, logging_metrics.LogMetric) - for i in results) + assert all(isinstance(i, logging_metrics.LogMetric) for i in results) + + def test__list_log_metrics_pages(transport_name: str = "grpc"): client = BaseMetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1322,9 +1722,7 @@ def test__list_log_metrics_pages(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( logging_metrics.ListLogMetricsResponse( @@ -1333,17 +1731,17 @@ def test__list_log_metrics_pages(transport_name: str = "grpc"): logging_metrics.LogMetric(), logging_metrics.LogMetric(), ], - next_page_token='abc', + next_page_token="abc", ), logging_metrics.ListLogMetricsResponse( metrics=[], - next_page_token='def', + next_page_token="def", ), logging_metrics.ListLogMetricsResponse( metrics=[ logging_metrics.LogMetric(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_metrics.ListLogMetricsResponse( metrics=[ @@ -1354,9 +1752,10 @@ def test__list_log_metrics_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client._list_log_metrics(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test__list_log_metrics_async_pager(): client = BaseMetricsServiceV2AsyncClient( @@ -1365,8 +1764,8 @@ async def test__list_log_metrics_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_log_metrics), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging_metrics.ListLogMetricsResponse( @@ -1375,17 +1774,17 @@ async def test__list_log_metrics_async_pager(): logging_metrics.LogMetric(), logging_metrics.LogMetric(), ], - next_page_token='abc', + next_page_token="abc", ), logging_metrics.ListLogMetricsResponse( metrics=[], - next_page_token='def', + next_page_token="def", ), logging_metrics.ListLogMetricsResponse( metrics=[ logging_metrics.LogMetric(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_metrics.ListLogMetricsResponse( metrics=[ @@ -1395,15 +1794,16 @@ async def test__list_log_metrics_async_pager(): ), RuntimeError, ) - async_pager = await client._list_log_metrics(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client._list_log_metrics( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, logging_metrics.LogMetric) - for i in responses) + assert all(isinstance(i, logging_metrics.LogMetric) for i in responses) @pytest.mark.asyncio @@ -1414,8 +1814,8 @@ async def test__list_log_metrics_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_log_metrics), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( logging_metrics.ListLogMetricsResponse( @@ -1424,17 +1824,17 @@ async def test__list_log_metrics_async_pages(): logging_metrics.LogMetric(), logging_metrics.LogMetric(), ], - next_page_token='abc', + next_page_token="abc", ), logging_metrics.ListLogMetricsResponse( metrics=[], - next_page_token='def', + next_page_token="def", ), logging_metrics.ListLogMetricsResponse( metrics=[ logging_metrics.LogMetric(), ], - next_page_token='ghi', + next_page_token="ghi", ), logging_metrics.ListLogMetricsResponse( metrics=[ @@ -1447,18 +1847,22 @@ async def test__list_log_metrics_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client._list_log_metrics(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - logging_metrics.GetLogMetricRequest, - dict, -]) -def test__get_log_metric(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + logging_metrics.GetLogMetricRequest, + dict, + ], +) +def test__get_log_metric(request_type, transport: str = "grpc"): client = BaseMetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1469,17 +1873,15 @@ def test__get_log_metric(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_log_metric), - '__call__') as call: + with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", disabled=True, - value_extractor='value_extractor_value', + value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, ) response = client._get_log_metric(request) @@ -1492,12 +1894,12 @@ def test__get_log_metric(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.bucket_name == 'bucket_name_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True - assert response.value_extractor == 'value_extractor_value' + assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1506,28 +1908,29 @@ def test__get_log_metric_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseMetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_metrics.GetLogMetricRequest( - metric_name='metric_name_value', + metric_name="metric_name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_log_metric), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._get_log_metric(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.GetLogMetricRequest( - metric_name='metric_name_value', + metric_name="metric_name_value", ) + def test__get_log_metric_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1546,7 +1949,9 @@ def test__get_log_metric_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.get_log_metric] = mock_rpc request = {} client._get_log_metric(request) @@ -1560,8 +1965,11 @@ def test__get_log_metric_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__get_log_metric_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test__get_log_metric_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1575,12 +1983,17 @@ async def test__get_log_metric_async_use_cached_wrapped_rpc(transport: str = "gr wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_log_metric in client._client._transport._wrapped_methods + assert ( + client._client._transport.get_log_metric + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_log_metric] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.get_log_metric + ] = mock_rpc request = {} await client._get_log_metric(request) @@ -1594,8 +2007,11 @@ async def test__get_log_metric_async_use_cached_wrapped_rpc(transport: str = "gr assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__get_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.GetLogMetricRequest): +async def test__get_log_metric_async( + transport: str = "grpc_asyncio", request_type=logging_metrics.GetLogMetricRequest +): client = BaseMetricsServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -1606,19 +2022,19 @@ async def 
test__get_log_metric_async(transport: str = 'grpc_asyncio', request_ty request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_log_metric), - '__call__') as call: + with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', - disabled=True, - value_extractor='value_extractor_value', - version=logging_metrics.LogMetric.ApiVersion.V1, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric( + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", + disabled=True, + value_extractor="value_extractor_value", + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + ) response = await client._get_log_metric(request) # Establish that the underlying gRPC stub method was called. @@ -1629,12 +2045,12 @@ async def test__get_log_metric_async(transport: str = 'grpc_asyncio', request_ty # Establish that the response is the type that we expect. 
assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.bucket_name == 'bucket_name_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True - assert response.value_extractor == 'value_extractor_value' + assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1642,6 +2058,7 @@ async def test__get_log_metric_async(transport: str = 'grpc_asyncio', request_ty async def test__get_log_metric_async_from_dict(): await test__get_log_metric_async(request_type=dict) + def test__get_log_metric_field_headers(): client = BaseMetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1651,12 +2068,10 @@ def test__get_log_metric_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.GetLogMetricRequest() - request.metric_name = 'metric_name_value' + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_log_metric), - '__call__') as call: + with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: call.return_value = logging_metrics.LogMetric() client._get_log_metric(request) @@ -1668,9 +2083,9 @@ def test__get_log_metric_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'metric_name=metric_name_value', - ) in kw['metadata'] + "x-goog-request-params", + "metric_name=metric_name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1683,13 +2098,13 @@ async def test__get_log_metric_field_headers_async(): # a field header. 
Set these to a non-empty value. request = logging_metrics.GetLogMetricRequest() - request.metric_name = 'metric_name_value' + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_log_metric), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) + with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric() + ) await client._get_log_metric(request) # Establish that the underlying gRPC stub method was called. @@ -1700,9 +2115,9 @@ async def test__get_log_metric_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'metric_name=metric_name_value', - ) in kw['metadata'] + "x-goog-request-params", + "metric_name=metric_name_value", + ) in kw["metadata"] def test__get_log_metric_flattened(): @@ -1711,15 +2126,13 @@ def test__get_log_metric_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_log_metric), - '__call__') as call: + with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client._get_log_metric( - metric_name='metric_name_value', + metric_name="metric_name_value", ) # Establish that the underlying call was made with the expected @@ -1727,7 +2140,7 @@ def test__get_log_metric_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].metric_name - mock_val = 'metric_name_value' + mock_val = "metric_name_value" assert arg == mock_val @@ -1741,9 +2154,10 @@ def test__get_log_metric_flattened_error(): with pytest.raises(ValueError): client._get_log_metric( logging_metrics.GetLogMetricRequest(), - metric_name='metric_name_value', + metric_name="metric_name_value", ) + @pytest.mark.asyncio async def test__get_log_metric_flattened_async(): client = BaseMetricsServiceV2AsyncClient( @@ -1751,17 +2165,17 @@ async def test__get_log_metric_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_log_metric), - '__call__') as call: + with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client._get_log_metric( - metric_name='metric_name_value', + metric_name="metric_name_value", ) # Establish that the underlying call was made with the expected @@ -1769,9 +2183,10 @@ async def test__get_log_metric_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].metric_name - mock_val = 'metric_name_value' + mock_val = "metric_name_value" assert arg == mock_val + @pytest.mark.asyncio async def test__get_log_metric_flattened_error_async(): client = BaseMetricsServiceV2AsyncClient( @@ -1783,15 +2198,18 @@ async def test__get_log_metric_flattened_error_async(): with pytest.raises(ValueError): await client._get_log_metric( logging_metrics.GetLogMetricRequest(), - metric_name='metric_name_value', + metric_name="metric_name_value", ) -@pytest.mark.parametrize("request_type", [ - logging_metrics.CreateLogMetricRequest, - dict, -]) -def test__create_log_metric(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_metrics.CreateLogMetricRequest, + dict, + ], +) +def test__create_log_metric(request_type, transport: str = "grpc"): client = BaseMetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1803,16 +2221,16 @@ def test__create_log_metric(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_log_metric), - '__call__') as call: + type(client.transport.create_log_metric), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", disabled=True, - value_extractor='value_extractor_value', + value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, ) response = client._create_log_metric(request) @@ -1825,12 +2243,12 @@ def test__create_log_metric(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.bucket_name == 'bucket_name_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True - assert response.value_extractor == 'value_extractor_value' + assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1839,28 +2257,31 @@ def test__create_log_metric_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseMetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_metrics.CreateLogMetricRequest( - parent='parent_value', + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.create_log_metric), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.create_log_metric), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._create_log_metric(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.CreateLogMetricRequest( - parent='parent_value', + parent="parent_value", ) + def test__create_log_metric_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1879,8 +2300,12 @@ def test__create_log_metric_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_log_metric] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.create_log_metric] = ( + mock_rpc + ) request = {} client._create_log_metric(request) @@ -1893,8 +2318,11 @@ def test__create_log_metric_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__create_log_metric_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test__create_log_metric_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1908,12 +2336,17 @@ async def test__create_log_metric_async_use_cached_wrapped_rpc(transport: str = wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.create_log_metric in client._client._transport._wrapped_methods + assert ( + client._client._transport.create_log_metric + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_log_metric] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.create_log_metric + ] = mock_rpc request = {} await client._create_log_metric(request) @@ -1927,8 +2360,11 @@ async def test__create_log_metric_async_use_cached_wrapped_rpc(transport: str = assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__create_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.CreateLogMetricRequest): +async def test__create_log_metric_async( + transport: str = "grpc_asyncio", request_type=logging_metrics.CreateLogMetricRequest +): client = BaseMetricsServiceV2AsyncClient( credentials=async_anonymous_credentials(), 
transport=transport, @@ -1940,18 +2376,20 @@ async def test__create_log_metric_async(transport: str = 'grpc_asyncio', request # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_log_metric), - '__call__') as call: + type(client.transport.create_log_metric), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', - disabled=True, - value_extractor='value_extractor_value', - version=logging_metrics.LogMetric.ApiVersion.V1, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric( + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", + disabled=True, + value_extractor="value_extractor_value", + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + ) response = await client._create_log_metric(request) # Establish that the underlying gRPC stub method was called. @@ -1962,12 +2400,12 @@ async def test__create_log_metric_async(transport: str = 'grpc_asyncio', request # Establish that the response is the type that we expect. 
assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.bucket_name == 'bucket_name_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True - assert response.value_extractor == 'value_extractor_value' + assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1975,6 +2413,7 @@ async def test__create_log_metric_async(transport: str = 'grpc_asyncio', request async def test__create_log_metric_async_from_dict(): await test__create_log_metric_async(request_type=dict) + def test__create_log_metric_field_headers(): client = BaseMetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1984,12 +2423,12 @@ def test__create_log_metric_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.CreateLogMetricRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_log_metric), - '__call__') as call: + type(client.transport.create_log_metric), "__call__" + ) as call: call.return_value = logging_metrics.LogMetric() client._create_log_metric(request) @@ -2001,9 +2440,9 @@ def test__create_log_metric_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2016,13 +2455,15 @@ async def test__create_log_metric_field_headers_async(): # a field header. Set these to a non-empty value. 
request = logging_metrics.CreateLogMetricRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_log_metric), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) + type(client.transport.create_log_metric), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric() + ) await client._create_log_metric(request) # Establish that the underlying gRPC stub method was called. @@ -2033,9 +2474,9 @@ async def test__create_log_metric_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test__create_log_metric_flattened(): @@ -2045,15 +2486,15 @@ def test__create_log_metric_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_log_metric), - '__call__') as call: + type(client.transport.create_log_metric), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client._create_log_metric( - parent='parent_value', - metric=logging_metrics.LogMetric(name='name_value'), + parent="parent_value", + metric=logging_metrics.LogMetric(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -2061,10 +2502,10 @@ def test__create_log_metric_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].metric - mock_val = logging_metrics.LogMetric(name='name_value') + mock_val = logging_metrics.LogMetric(name="name_value") assert arg == mock_val @@ -2078,10 +2519,11 @@ def test__create_log_metric_flattened_error(): with pytest.raises(ValueError): client._create_log_metric( logging_metrics.CreateLogMetricRequest(), - parent='parent_value', - metric=logging_metrics.LogMetric(name='name_value'), + parent="parent_value", + metric=logging_metrics.LogMetric(name="name_value"), ) + @pytest.mark.asyncio async def test__create_log_metric_flattened_async(): client = BaseMetricsServiceV2AsyncClient( @@ -2090,17 +2532,19 @@ async def test__create_log_metric_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_log_metric), - '__call__') as call: + type(client.transport.create_log_metric), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client._create_log_metric( - parent='parent_value', - metric=logging_metrics.LogMetric(name='name_value'), + parent="parent_value", + metric=logging_metrics.LogMetric(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -2108,12 +2552,13 @@ async def test__create_log_metric_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].metric - mock_val = logging_metrics.LogMetric(name='name_value') + mock_val = logging_metrics.LogMetric(name="name_value") assert arg == mock_val + @pytest.mark.asyncio async def test__create_log_metric_flattened_error_async(): client = BaseMetricsServiceV2AsyncClient( @@ -2125,16 +2570,19 @@ async def test__create_log_metric_flattened_error_async(): with pytest.raises(ValueError): await client._create_log_metric( logging_metrics.CreateLogMetricRequest(), - parent='parent_value', - metric=logging_metrics.LogMetric(name='name_value'), + parent="parent_value", + metric=logging_metrics.LogMetric(name="name_value"), ) -@pytest.mark.parametrize("request_type", [ - logging_metrics.UpdateLogMetricRequest, - dict, -]) -def test__update_log_metric(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_metrics.UpdateLogMetricRequest, + dict, + ], +) +def test__update_log_metric(request_type, transport: str = "grpc"): client = BaseMetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2146,16 +2594,16 @@ def test__update_log_metric(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_log_metric), - '__call__') as call: + type(client.transport.update_log_metric), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", disabled=True, - value_extractor='value_extractor_value', + value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, ) response = client._update_log_metric(request) @@ -2168,12 +2616,12 @@ def test__update_log_metric(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.bucket_name == 'bucket_name_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True - assert response.value_extractor == 'value_extractor_value' + assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -2182,28 +2630,31 @@ def test__update_log_metric_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseMetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_metrics.UpdateLogMetricRequest( - metric_name='metric_name_value', + metric_name="metric_name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_log_metric), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.update_log_metric), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._update_log_metric(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.UpdateLogMetricRequest( - metric_name='metric_name_value', + metric_name="metric_name_value", ) + def test__update_log_metric_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2222,8 +2673,12 @@ def test__update_log_metric_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_log_metric] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.update_log_metric] = ( + mock_rpc + ) request = {} client._update_log_metric(request) @@ -2236,8 +2691,11 @@ def test__update_log_metric_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__update_log_metric_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test__update_log_metric_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2251,12 +2709,17 @@ async def test__update_log_metric_async_use_cached_wrapped_rpc(transport: str = wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.update_log_metric in client._client._transport._wrapped_methods + assert ( + client._client._transport.update_log_metric + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_log_metric] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.update_log_metric + ] = mock_rpc request = {} await client._update_log_metric(request) @@ -2270,8 +2733,11 @@ async def test__update_log_metric_async_use_cached_wrapped_rpc(transport: str = assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__update_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.UpdateLogMetricRequest): +async def test__update_log_metric_async( + transport: str = "grpc_asyncio", request_type=logging_metrics.UpdateLogMetricRequest +): client = BaseMetricsServiceV2AsyncClient( credentials=async_anonymous_credentials(), 
transport=transport, @@ -2283,18 +2749,20 @@ async def test__update_log_metric_async(transport: str = 'grpc_asyncio', request # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_log_metric), - '__call__') as call: + type(client.transport.update_log_metric), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', - disabled=True, - value_extractor='value_extractor_value', - version=logging_metrics.LogMetric.ApiVersion.V1, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric( + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", + disabled=True, + value_extractor="value_extractor_value", + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + ) response = await client._update_log_metric(request) # Establish that the underlying gRPC stub method was called. @@ -2305,12 +2773,12 @@ async def test__update_log_metric_async(transport: str = 'grpc_asyncio', request # Establish that the response is the type that we expect. 
assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.bucket_name == 'bucket_name_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True - assert response.value_extractor == 'value_extractor_value' + assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -2318,6 +2786,7 @@ async def test__update_log_metric_async(transport: str = 'grpc_asyncio', request async def test__update_log_metric_async_from_dict(): await test__update_log_metric_async(request_type=dict) + def test__update_log_metric_field_headers(): client = BaseMetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2327,12 +2796,12 @@ def test__update_log_metric_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.UpdateLogMetricRequest() - request.metric_name = 'metric_name_value' + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_log_metric), - '__call__') as call: + type(client.transport.update_log_metric), "__call__" + ) as call: call.return_value = logging_metrics.LogMetric() client._update_log_metric(request) @@ -2344,9 +2813,9 @@ def test__update_log_metric_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'metric_name=metric_name_value', - ) in kw['metadata'] + "x-goog-request-params", + "metric_name=metric_name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2359,13 +2828,15 @@ async def test__update_log_metric_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_metrics.UpdateLogMetricRequest() - request.metric_name = 'metric_name_value' + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_log_metric), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) + type(client.transport.update_log_metric), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric() + ) await client._update_log_metric(request) # Establish that the underlying gRPC stub method was called. @@ -2376,9 +2847,9 @@ async def test__update_log_metric_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'metric_name=metric_name_value', - ) in kw['metadata'] + "x-goog-request-params", + "metric_name=metric_name_value", + ) in kw["metadata"] def test__update_log_metric_flattened(): @@ -2388,15 +2859,15 @@ def test__update_log_metric_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_log_metric), - '__call__') as call: + type(client.transport.update_log_metric), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client._update_log_metric( - metric_name='metric_name_value', - metric=logging_metrics.LogMetric(name='name_value'), + metric_name="metric_name_value", + metric=logging_metrics.LogMetric(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -2404,10 +2875,10 @@ def test__update_log_metric_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].metric_name - mock_val = 'metric_name_value' + mock_val = "metric_name_value" assert arg == mock_val arg = args[0].metric - mock_val = logging_metrics.LogMetric(name='name_value') + mock_val = logging_metrics.LogMetric(name="name_value") assert arg == mock_val @@ -2421,10 +2892,11 @@ def test__update_log_metric_flattened_error(): with pytest.raises(ValueError): client._update_log_metric( logging_metrics.UpdateLogMetricRequest(), - metric_name='metric_name_value', - metric=logging_metrics.LogMetric(name='name_value'), + metric_name="metric_name_value", + metric=logging_metrics.LogMetric(name="name_value"), ) + @pytest.mark.asyncio async def test__update_log_metric_flattened_async(): client = BaseMetricsServiceV2AsyncClient( @@ -2433,17 +2905,19 @@ async def test__update_log_metric_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_log_metric), - '__call__') as call: + type(client.transport.update_log_metric), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client._update_log_metric( - metric_name='metric_name_value', - metric=logging_metrics.LogMetric(name='name_value'), + metric_name="metric_name_value", + metric=logging_metrics.LogMetric(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -2451,12 +2925,13 @@ async def test__update_log_metric_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].metric_name - mock_val = 'metric_name_value' + mock_val = "metric_name_value" assert arg == mock_val arg = args[0].metric - mock_val = logging_metrics.LogMetric(name='name_value') + mock_val = logging_metrics.LogMetric(name="name_value") assert arg == mock_val + @pytest.mark.asyncio async def test__update_log_metric_flattened_error_async(): client = BaseMetricsServiceV2AsyncClient( @@ -2468,16 +2943,19 @@ async def test__update_log_metric_flattened_error_async(): with pytest.raises(ValueError): await client._update_log_metric( logging_metrics.UpdateLogMetricRequest(), - metric_name='metric_name_value', - metric=logging_metrics.LogMetric(name='name_value'), + metric_name="metric_name_value", + metric=logging_metrics.LogMetric(name="name_value"), ) -@pytest.mark.parametrize("request_type", [ - logging_metrics.DeleteLogMetricRequest, - dict, -]) -def test__delete_log_metric(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + logging_metrics.DeleteLogMetricRequest, + dict, + ], +) +def test__delete_log_metric(request_type, transport: str = "grpc"): client = BaseMetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2489,8 +2967,8 @@ def test__delete_log_metric(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.delete_log_metric), - '__call__') as call: + type(client.transport.delete_log_metric), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = None response = client._delete_log_metric(request) @@ -2510,28 +2988,31 @@ def test__delete_log_metric_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = BaseMetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_metrics.DeleteLogMetricRequest( - metric_name='metric_name_value', + metric_name="metric_name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_log_metric), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.delete_log_metric), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._delete_log_metric(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.DeleteLogMetricRequest( - metric_name='metric_name_value', + metric_name="metric_name_value", ) + def test__delete_log_metric_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2550,8 +3031,12 @@ def test__delete_log_metric_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.delete_log_metric] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_log_metric] = ( + mock_rpc + ) request = {} client._delete_log_metric(request) @@ -2564,8 +3049,11 @@ def test__delete_log_metric_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__delete_log_metric_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test__delete_log_metric_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2579,12 +3067,17 @@ async def test__delete_log_metric_async_use_cached_wrapped_rpc(transport: str = wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.delete_log_metric in client._client._transport._wrapped_methods + assert ( + client._client._transport.delete_log_metric + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_log_metric] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.delete_log_metric + ] = mock_rpc request = {} await client._delete_log_metric(request) @@ -2598,8 +3091,11 @@ async def test__delete_log_metric_async_use_cached_wrapped_rpc(transport: str = assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test__delete_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.DeleteLogMetricRequest): +async def 
test__delete_log_metric_async( + transport: str = "grpc_asyncio", request_type=logging_metrics.DeleteLogMetricRequest +): client = BaseMetricsServiceV2AsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2611,8 +3107,8 @@ async def test__delete_log_metric_async(transport: str = 'grpc_asyncio', request # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_log_metric), - '__call__') as call: + type(client.transport.delete_log_metric), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client._delete_log_metric(request) @@ -2631,6 +3127,7 @@ async def test__delete_log_metric_async(transport: str = 'grpc_asyncio', request async def test__delete_log_metric_async_from_dict(): await test__delete_log_metric_async(request_type=dict) + def test__delete_log_metric_field_headers(): client = BaseMetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2640,12 +3137,12 @@ def test__delete_log_metric_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.DeleteLogMetricRequest() - request.metric_name = 'metric_name_value' + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_log_metric), - '__call__') as call: + type(client.transport.delete_log_metric), "__call__" + ) as call: call.return_value = None client._delete_log_metric(request) @@ -2657,9 +3154,9 @@ def test__delete_log_metric_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'metric_name=metric_name_value', - ) in kw['metadata'] + "x-goog-request-params", + "metric_name=metric_name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2672,12 +3169,12 @@ async def test__delete_log_metric_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_metrics.DeleteLogMetricRequest() - request.metric_name = 'metric_name_value' + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_log_metric), - '__call__') as call: + type(client.transport.delete_log_metric), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client._delete_log_metric(request) @@ -2689,9 +3186,9 @@ async def test__delete_log_metric_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'metric_name=metric_name_value', - ) in kw['metadata'] + "x-goog-request-params", + "metric_name=metric_name_value", + ) in kw["metadata"] def test__delete_log_metric_flattened(): @@ -2701,14 +3198,14 @@ def test__delete_log_metric_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_log_metric), - '__call__') as call: + type(client.transport.delete_log_metric), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client._delete_log_metric( - metric_name='metric_name_value', + metric_name="metric_name_value", ) # Establish that the underlying call was made with the expected @@ -2716,7 +3213,7 @@ def test__delete_log_metric_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].metric_name - mock_val = 'metric_name_value' + mock_val = "metric_name_value" assert arg == mock_val @@ -2730,9 +3227,10 @@ def test__delete_log_metric_flattened_error(): with pytest.raises(ValueError): client._delete_log_metric( logging_metrics.DeleteLogMetricRequest(), - metric_name='metric_name_value', + metric_name="metric_name_value", ) + @pytest.mark.asyncio async def test__delete_log_metric_flattened_async(): client = BaseMetricsServiceV2AsyncClient( @@ -2741,8 +3239,8 @@ async def test__delete_log_metric_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_log_metric), - '__call__') as call: + type(client.transport.delete_log_metric), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2750,7 +3248,7 @@ async def test__delete_log_metric_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client._delete_log_metric( - metric_name='metric_name_value', + metric_name="metric_name_value", ) # Establish that the underlying call was made with the expected @@ -2758,9 +3256,10 @@ async def test__delete_log_metric_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].metric_name - mock_val = 'metric_name_value' + mock_val = "metric_name_value" assert arg == mock_val + @pytest.mark.asyncio async def test__delete_log_metric_flattened_error_async(): client = BaseMetricsServiceV2AsyncClient( @@ -2772,7 +3271,7 @@ async def test__delete_log_metric_flattened_error_async(): with pytest.raises(ValueError): await client._delete_log_metric( logging_metrics.DeleteLogMetricRequest(), - metric_name='metric_name_value', + metric_name="metric_name_value", ) @@ -2814,8 +3313,7 @@ def test_credentials_transport_error(): options.api_key = "api_key" with pytest.raises(ValueError): client = BaseMetricsServiceV2Client( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() + client_options=options, credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. @@ -2837,6 +3335,7 @@ def test_transport_instance(): client = BaseMetricsServiceV2Client(transport=transport) assert client.transport is transport + def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.MetricsServiceV2GrpcTransport( @@ -2851,17 +3350,22 @@ def test_transport_get_channel(): channel = transport.grpc_channel assert channel -@pytest.mark.parametrize("transport_class", [ - transports.MetricsServiceV2GrpcTransport, - transports.MetricsServiceV2GrpcAsyncIOTransport, -]) + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2GrpcAsyncIOTransport, + ], +) def test_transport_adc(transport_class): # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() + def test_transport_kind_grpc(): transport = BaseMetricsServiceV2Client.get_transport_class("grpc")( credentials=ga_credentials.AnonymousCredentials() @@ -2871,8 +3375,7 @@ def test_transport_kind_grpc(): def test_initialize_client_w_grpc(): client = BaseMetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" ) assert client is not None @@ -2886,9 +3389,7 @@ def test__list_log_metrics_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: call.return_value = logging_metrics.ListLogMetricsResponse() client._list_log_metrics(request=None) @@ -2909,9 +3410,7 @@ def test__get_log_metric_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_log_metric), - '__call__') as call: + with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: call.return_value = logging_metrics.LogMetric() client._get_log_metric(request=None) @@ -2933,8 +3432,8 @@ def test__create_log_metric_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.create_log_metric), - '__call__') as call: + type(client.transport.create_log_metric), "__call__" + ) as call: call.return_value = logging_metrics.LogMetric() client._create_log_metric(request=None) @@ -2956,8 +3455,8 @@ def test__update_log_metric_empty_call_grpc(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.update_log_metric), - '__call__') as call: + type(client.transport.update_log_metric), "__call__" + ) as call: call.return_value = logging_metrics.LogMetric() client._update_log_metric(request=None) @@ -2979,8 +3478,8 @@ def test__delete_log_metric_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.delete_log_metric), - '__call__') as call: + type(client.transport.delete_log_metric), "__call__" + ) as call: call.return_value = None client._delete_log_metric(request=None) @@ -3001,8 +3500,7 @@ def test_transport_kind_grpc_asyncio(): def test_initialize_client_w_grpc_asyncio(): client = BaseMetricsServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" + credentials=async_anonymous_credentials(), transport="grpc_asyncio" ) assert client is not None @@ -3017,13 +3515,13 @@ async def test__list_log_metrics_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: + with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse( - next_page_token='next_page_token_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.ListLogMetricsResponse( + next_page_token="next_page_token_value", + ) + ) await client._list_log_metrics(request=None) # Establish that the underlying stub method was called. @@ -3044,19 +3542,19 @@ async def test__get_log_metric_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.get_log_metric), - '__call__') as call: + with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', - disabled=True, - value_extractor='value_extractor_value', - version=logging_metrics.LogMetric.ApiVersion.V1, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric( + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", + disabled=True, + value_extractor="value_extractor_value", + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + ) await client._get_log_metric(request=None) # Establish that the underlying stub method was called. @@ -3078,18 +3576,20 @@ async def test__create_log_metric_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.create_log_metric), - '__call__') as call: + type(client.transport.create_log_metric), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', - disabled=True, - value_extractor='value_extractor_value', - version=logging_metrics.LogMetric.ApiVersion.V1, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric( + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", + disabled=True, + value_extractor="value_extractor_value", + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + ) await client._create_log_metric(request=None) # Establish that the underlying stub method was called. @@ -3111,18 +3611,20 @@ async def test__update_log_metric_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_log_metric), - '__call__') as call: + type(client.transport.update_log_metric), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', - disabled=True, - value_extractor='value_extractor_value', - version=logging_metrics.LogMetric.ApiVersion.V1, - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_metrics.LogMetric( + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", + disabled=True, + value_extractor="value_extractor_value", + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + ) await client._update_log_metric(request=None) # Establish that the underlying stub method was called. @@ -3144,8 +3646,8 @@ async def test__delete_log_metric_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.delete_log_metric), - '__call__') as call: + type(client.transport.delete_log_metric), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client._delete_log_metric(request=None) @@ -3168,18 +3670,21 @@ def test_transport_grpc_default(): transports.MetricsServiceV2GrpcTransport, ) + def test_metrics_service_v2_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.MetricsServiceV2Transport( credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" + credentials_file="credentials.json", ) def test_metrics_service_v2_base_transport(): # Instantiate the base transport. - with mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport.__init__') as Transport: + with mock.patch( + "google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport.__init__" + ) as Transport: Transport.return_value = None transport = transports.MetricsServiceV2Transport( credentials=ga_credentials.AnonymousCredentials(), @@ -3188,14 +3693,14 @@ def test_metrics_service_v2_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. 
methods = ( - 'list_log_metrics', - 'get_log_metric', - 'create_log_metric', - 'update_log_metric', - 'delete_log_metric', - 'get_operation', - 'cancel_operation', - 'list_operations', + "list_log_metrics", + "get_log_metric", + "create_log_metric", + "update_log_metric", + "delete_log_metric", + "get_operation", + "cancel_operation", + "list_operations", ) for method in methods: with pytest.raises(NotImplementedError): @@ -3206,7 +3711,7 @@ def test_metrics_service_v2_base_transport(): # Catch all for all remaining methods and properties remainder = [ - 'kind', + "kind", ] for r in remainder: with pytest.raises(NotImplementedError): @@ -3215,29 +3720,42 @@ def test_metrics_service_v2_base_transport(): def test_metrics_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages') as Transport: + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch( + "google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages" + ) as Transport, + ): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.MetricsServiceV2Transport( credentials_file="credentials.json", quota_project_id="octopus", ) - load_creds.assert_called_once_with("credentials.json", + load_creds.assert_called_once_with( + "credentials.json", scopes=None, default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', -), + 
"https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), quota_project_id="octopus", ) def test_metrics_service_v2_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages') as Transport: + with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch( + "google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages" + ) as Transport, + ): Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.MetricsServiceV2Transport() @@ -3246,18 +3764,18 @@ def test_metrics_service_v2_base_transport_with_adc(): def test_metrics_service_v2_auth_adc(): # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) BaseMetricsServiceV2Client() adc.assert_called_once_with( scopes=None, default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', -), + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), quota_project_id=None, ) @@ -3272,12 +3790,18 @@ def test_metrics_service_v2_auth_adc(): def test_metrics_service_v2_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read', 'https://www.googleapis.com/auth/logging.write',), + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), quota_project_id="octopus", ) @@ -3290,39 +3814,39 @@ def test_metrics_service_v2_transport_auth_adc(transport_class): ], ) def test_metrics_service_v2_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) adc.return_value = (gdch_mock, None) transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) + gdch_mock.with_gdch_audience.assert_called_once_with(e) 
@pytest.mark.parametrize( "transport_class,grpc_helpers", [ (transports.MetricsServiceV2GrpcTransport, grpc_helpers), - (transports.MetricsServiceV2GrpcAsyncIOTransport, grpc_helpers_async) + (transports.MetricsServiceV2GrpcAsyncIOTransport, grpc_helpers_async), ], ) def test_metrics_service_v2_transport_create_channel(transport_class, grpc_helpers): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: + with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel, + ): creds = ga_credentials.AnonymousCredentials() adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) create_channel.assert_called_with( "logging.googleapis.com:443", @@ -3330,12 +3854,12 @@ def test_metrics_service_v2_transport_create_channel(transport_class, grpc_helpe credentials_file=None, quota_project_id="octopus", default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', -), + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), scopes=["1", "2"], default_host="logging.googleapis.com", ssl_credentials=None, @@ -3346,10 +3870,14 @@ def test_metrics_service_v2_transport_create_channel(transport_class, grpc_helpe ) 
-@pytest.mark.parametrize("transport_class", [transports.MetricsServiceV2GrpcTransport, transports.MetricsServiceV2GrpcAsyncIOTransport]) -def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls( - transport_class -): +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2GrpcAsyncIOTransport, + ], +) +def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls(transport_class): cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. @@ -3358,7 +3886,7 @@ def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls( transport_class( host="squid.clam.whelk", credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds + ssl_channel_credentials=mock_ssl_channel_creds, ) mock_create_channel.assert_called_once_with( "squid.clam.whelk:443", @@ -3379,45 +3907,52 @@ def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls( with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: transport_class( credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback + client_cert_source_for_mtls=client_cert_source_callback, ) expected_cert, expected_key = client_cert_source_callback() mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key + certificate_chain=expected_cert, private_key=expected_key ) -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) def test_metrics_service_v2_host_no_port(transport_name): client = BaseMetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'logging.googleapis.com:443' + client_options=client_options.ClientOptions( + 
api_endpoint="logging.googleapis.com" + ), + transport=transport_name, ) + assert client.transport._host == ("logging.googleapis.com:443") + -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) def test_metrics_service_v2_host_with_port(transport_name): client = BaseMetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com:8000'), + client_options=client_options.ClientOptions( + api_endpoint="logging.googleapis.com:8000" + ), transport=transport_name, ) - assert client.transport._host == ( - 'logging.googleapis.com:8000' - ) + assert client.transport._host == ("logging.googleapis.com:8000") + def test_metrics_service_v2_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.MetricsServiceV2GrpcTransport( @@ -3430,7 +3965,7 @@ def test_metrics_service_v2_grpc_transport_channel(): def test_metrics_service_v2_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.MetricsServiceV2GrpcAsyncIOTransport( @@ -3445,12 +3980,22 @@ def test_metrics_service_v2_grpc_asyncio_transport_channel(): # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.filterwarnings("ignore::FutureWarning") -@pytest.mark.parametrize("transport_class", [transports.MetricsServiceV2GrpcTransport, transports.MetricsServiceV2GrpcAsyncIOTransport]) +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2GrpcAsyncIOTransport, + ], +) def test_metrics_service_v2_transport_channel_mtls_with_client_cert_source( - transport_class + transport_class, ): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -3459,7 +4004,7 @@ def test_metrics_service_v2_transport_channel_mtls_with_client_cert_source( cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -3489,17 +4034,23 @@ def test_metrics_service_v2_transport_channel_mtls_with_client_cert_source( # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.MetricsServiceV2GrpcTransport, transports.MetricsServiceV2GrpcAsyncIOTransport]) -def test_metrics_service_v2_transport_channel_mtls_with_adc( - transport_class -): +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2GrpcAsyncIOTransport, + ], +) +def test_metrics_service_v2_transport_channel_mtls_with_adc(transport_class): mock_ssl_cred = mock.Mock() with mock.patch.multiple( "google.auth.transport.grpc.SslCredentials", __init__=mock.Mock(return_value=None), ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel mock_cred = mock.Mock() @@ -3530,7 +4081,10 @@ def test_metrics_service_v2_transport_channel_mtls_with_adc( def test_log_metric_path(): project = "squid" metric = "clam" - expected = "projects/{project}/metrics/{metric}".format(project=project, metric=metric, ) + expected = "projects/{project}/metrics/{metric}".format( + project=project, + metric=metric, + ) actual = BaseMetricsServiceV2Client.log_metric_path(project, metric) assert expected == actual @@ -3546,9 +4100,12 @@ def test_parse_log_metric_path(): actual = BaseMetricsServiceV2Client.parse_log_metric_path(path) assert expected == actual + def test_common_billing_account_path(): billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) actual = BaseMetricsServiceV2Client.common_billing_account_path(billing_account) assert expected == actual @@ -3563,9 +4120,12 @@ def test_parse_common_billing_account_path(): actual = 
BaseMetricsServiceV2Client.parse_common_billing_account_path(path) assert expected == actual + def test_common_folder_path(): folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) + expected = "folders/{folder}".format( + folder=folder, + ) actual = BaseMetricsServiceV2Client.common_folder_path(folder) assert expected == actual @@ -3580,9 +4140,12 @@ def test_parse_common_folder_path(): actual = BaseMetricsServiceV2Client.parse_common_folder_path(path) assert expected == actual + def test_common_organization_path(): organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = BaseMetricsServiceV2Client.common_organization_path(organization) assert expected == actual @@ -3597,9 +4160,12 @@ def test_parse_common_organization_path(): actual = BaseMetricsServiceV2Client.parse_common_organization_path(path) assert expected == actual + def test_common_project_path(): project = "scallop" - expected = "projects/{project}".format(project=project, ) + expected = "projects/{project}".format( + project=project, + ) actual = BaseMetricsServiceV2Client.common_project_path(project) assert expected == actual @@ -3614,10 +4180,14 @@ def test_parse_common_project_path(): actual = BaseMetricsServiceV2Client.parse_common_project_path(path) assert expected == actual + def test_common_location_path(): project = "squid" location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) actual = BaseMetricsServiceV2Client.common_location_path(project, location) assert expected == actual @@ -3637,14 +4207,18 @@ def test_parse_common_location_path(): def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() - with 
mock.patch.object(transports.MetricsServiceV2Transport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.MetricsServiceV2Transport, "_prep_wrapped_messages" + ) as prep: client = BaseMetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) - with mock.patch.object(transports.MetricsServiceV2Transport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.MetricsServiceV2Transport, "_prep_wrapped_messages" + ) as prep: transport_class = BaseMetricsServiceV2Client.get_transport_class() transport = transport_class( credentials=ga_credentials.AnonymousCredentials(), @@ -3655,7 +4229,8 @@ def test_client_with_default_client_info(): def test_cancel_operation(transport: str = "grpc"): client = BaseMetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3675,10 +4250,12 @@ def test_cancel_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. assert response is None + @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = BaseMetricsServiceV2AsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3688,9 +4265,7 @@ async def test_cancel_operation_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3713,7 +4288,7 @@ def test_cancel_operation_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None + call.return_value = None client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. @@ -3723,7 +4298,11 @@ def test_cancel_operation_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): @@ -3738,9 +4317,7 @@ async def test_cancel_operation_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -3749,7 +4326,10 @@ async def test_cancel_operation_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_cancel_operation_from_dict(): @@ -3768,6 +4348,7 @@ def test_cancel_operation_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = BaseMetricsServiceV2AsyncClient( @@ -3776,9 +4357,7 @@ async def test_cancel_operation_from_dict_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.cancel_operation( request={ "name": "locations", @@ -3802,6 +4381,7 @@ def test_cancel_operation_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.CancelOperationRequest() + @pytest.mark.asyncio async def test_cancel_operation_flattened_async(): client = BaseMetricsServiceV2AsyncClient( @@ -3810,9 +4390,7 @@ async def test_cancel_operation_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.cancel_operation() # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -3822,7 +4400,8 @@ async def test_cancel_operation_flattened_async(): def test_get_operation(transport: str = "grpc"): client = BaseMetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3842,10 +4421,12 @@ def test_get_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.Operation) + @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = BaseMetricsServiceV2AsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3890,7 +4471,11 @@ def test_get_operation_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_get_operation_field_headers_async(): @@ -3916,7 +4501,10 @@ async def test_get_operation_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_get_operation_from_dict(): @@ -3935,6 +4523,7 @@ def test_get_operation_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = BaseMetricsServiceV2AsyncClient( @@ -3969,6 +4558,7 @@ def test_get_operation_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.GetOperationRequest() + @pytest.mark.asyncio async def test_get_operation_flattened_async(): client = BaseMetricsServiceV2AsyncClient( @@ -3989,7 +4579,8 @@ async def test_get_operation_flattened_async(): def test_list_operations(transport: str = "grpc"): client = BaseMetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4009,10 +4600,12 @@ def test_list_operations(transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.ListOperationsResponse) + @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = BaseMetricsServiceV2AsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4057,7 +4650,11 @@ def test_list_operations_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_list_operations_field_headers_async(): @@ -4083,7 +4680,10 @@ async def test_list_operations_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_list_operations_from_dict(): @@ -4102,6 +4702,7 @@ def test_list_operations_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = BaseMetricsServiceV2AsyncClient( @@ -4136,6 +4737,7 @@ def test_list_operations_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.ListOperationsRequest() + @pytest.mark.asyncio async def test_list_operations_flattened_async(): client = BaseMetricsServiceV2AsyncClient( @@ -4156,10 +4758,11 @@ async def test_list_operations_flattened_async(): def test_transport_close_grpc(): client = BaseMetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: with client: close.assert_not_called() close.assert_called_once() @@ -4168,10 +4771,11 @@ def test_transport_close_grpc(): @pytest.mark.asyncio async def test_transport_close_grpc_asyncio(): client = BaseMetricsServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" + credentials=async_anonymous_credentials(), transport="grpc_asyncio" ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), 
"close") as close: + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: async with client: close.assert_not_called() close.assert_called_once() @@ -4179,12 +4783,11 @@ async def test_transport_close_grpc_asyncio(): def test_client_ctx(): transports = [ - 'grpc', + "grpc", ] for transport in transports: client = BaseMetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. with mock.patch.object(type(client.transport), "close") as close: @@ -4193,10 +4796,17 @@ def test_client_ctx(): pass close.assert_called() -@pytest.mark.parametrize("client_class,transport_class", [ - (BaseMetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport), - (BaseMetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport), -]) + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (BaseMetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport), + ( + BaseMetricsServiceV2AsyncClient, + transports.MetricsServiceV2GrpcAsyncIOTransport, + ), + ], +) def test_api_key_credentials(client_class, transport_class): with mock.patch.object( google.auth._default, "get_api_key_credentials", create=True @@ -4211,7 +4821,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py index b7a27a9b49cf..6e9b715b80c6 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py @@ -28,7 +28,6 @@ import os import shlex import sys -import logging from typing import Any # If extensions (or modules to document with autodoc) are in another directory, @@ -83,9 +82,9 @@ root_doc = "index" # General information about the project. -project = u"google-cloud-redis" -copyright = u"2025, Google, LLC" -author = u"Google APIs" +project = "google-cloud-redis" +copyright = "2025, Google, LLC" +author = "Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -285,7 +284,7 @@ ( root_doc, "google-cloud-redis.tex", - u"google-cloud-redis Documentation", + "google-cloud-redis Documentation", author, "manual", ) @@ -386,6 +385,7 @@ napoleon_use_param = True napoleon_use_rtype = True + # Setup for sphinx behaviors such as warning filters. class UnexpectedUnindentFilter(logging.Filter): """Filter out warnings about unexpected unindentation following bullet lists.""" @@ -413,5 +413,5 @@ def setup(app: Any) -> None: """ # Sphinx's logger is hierarchical. Adding a filter to the # root 'sphinx' logger will catch warnings from all sub-loggers. 
- logger = logging.getLogger('sphinx') + logger = logging.getLogger("sphinx") logger.addFilter(UnexpectedUnindentFilter()) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py index 98170c766532..03c75730e01e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py @@ -18,64 +18,68 @@ __version__ = package_version.__version__ +from google.cloud.redis_v1.services.cloud_redis.async_client import ( + CloudRedisAsyncClient, +) from google.cloud.redis_v1.services.cloud_redis.client import CloudRedisClient -from google.cloud.redis_v1.services.cloud_redis.async_client import CloudRedisAsyncClient - -from google.cloud.redis_v1.types.cloud_redis import CreateInstanceRequest -from google.cloud.redis_v1.types.cloud_redis import DeleteInstanceRequest -from google.cloud.redis_v1.types.cloud_redis import ExportInstanceRequest -from google.cloud.redis_v1.types.cloud_redis import FailoverInstanceRequest -from google.cloud.redis_v1.types.cloud_redis import GcsDestination -from google.cloud.redis_v1.types.cloud_redis import GcsSource -from google.cloud.redis_v1.types.cloud_redis import GetInstanceAuthStringRequest -from google.cloud.redis_v1.types.cloud_redis import GetInstanceRequest -from google.cloud.redis_v1.types.cloud_redis import ImportInstanceRequest -from google.cloud.redis_v1.types.cloud_redis import InputConfig -from google.cloud.redis_v1.types.cloud_redis import Instance -from google.cloud.redis_v1.types.cloud_redis import InstanceAuthString -from google.cloud.redis_v1.types.cloud_redis import ListInstancesRequest -from google.cloud.redis_v1.types.cloud_redis import ListInstancesResponse -from google.cloud.redis_v1.types.cloud_redis import LocationMetadata -from 
google.cloud.redis_v1.types.cloud_redis import MaintenancePolicy -from google.cloud.redis_v1.types.cloud_redis import MaintenanceSchedule -from google.cloud.redis_v1.types.cloud_redis import NodeInfo -from google.cloud.redis_v1.types.cloud_redis import OperationMetadata -from google.cloud.redis_v1.types.cloud_redis import OutputConfig -from google.cloud.redis_v1.types.cloud_redis import PersistenceConfig -from google.cloud.redis_v1.types.cloud_redis import RescheduleMaintenanceRequest -from google.cloud.redis_v1.types.cloud_redis import TlsCertificate -from google.cloud.redis_v1.types.cloud_redis import UpdateInstanceRequest -from google.cloud.redis_v1.types.cloud_redis import UpgradeInstanceRequest -from google.cloud.redis_v1.types.cloud_redis import WeeklyMaintenanceWindow -from google.cloud.redis_v1.types.cloud_redis import ZoneMetadata +from google.cloud.redis_v1.types.cloud_redis import ( + CreateInstanceRequest, + DeleteInstanceRequest, + ExportInstanceRequest, + FailoverInstanceRequest, + GcsDestination, + GcsSource, + GetInstanceAuthStringRequest, + GetInstanceRequest, + ImportInstanceRequest, + InputConfig, + Instance, + InstanceAuthString, + ListInstancesRequest, + ListInstancesResponse, + LocationMetadata, + MaintenancePolicy, + MaintenanceSchedule, + NodeInfo, + OperationMetadata, + OutputConfig, + PersistenceConfig, + RescheduleMaintenanceRequest, + TlsCertificate, + UpdateInstanceRequest, + UpgradeInstanceRequest, + WeeklyMaintenanceWindow, + ZoneMetadata, +) -__all__ = ('CloudRedisClient', - 'CloudRedisAsyncClient', - 'CreateInstanceRequest', - 'DeleteInstanceRequest', - 'ExportInstanceRequest', - 'FailoverInstanceRequest', - 'GcsDestination', - 'GcsSource', - 'GetInstanceAuthStringRequest', - 'GetInstanceRequest', - 'ImportInstanceRequest', - 'InputConfig', - 'Instance', - 'InstanceAuthString', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'LocationMetadata', - 'MaintenancePolicy', - 'MaintenanceSchedule', - 'NodeInfo', - 
'OperationMetadata', - 'OutputConfig', - 'PersistenceConfig', - 'RescheduleMaintenanceRequest', - 'TlsCertificate', - 'UpdateInstanceRequest', - 'UpgradeInstanceRequest', - 'WeeklyMaintenanceWindow', - 'ZoneMetadata', +__all__ = ( + "CloudRedisClient", + "CloudRedisAsyncClient", + "CreateInstanceRequest", + "DeleteInstanceRequest", + "ExportInstanceRequest", + "FailoverInstanceRequest", + "GcsDestination", + "GcsSource", + "GetInstanceAuthStringRequest", + "GetInstanceRequest", + "ImportInstanceRequest", + "InputConfig", + "Instance", + "InstanceAuthString", + "ListInstancesRequest", + "ListInstancesResponse", + "LocationMetadata", + "MaintenancePolicy", + "MaintenanceSchedule", + "NodeInfo", + "OperationMetadata", + "OutputConfig", + "PersistenceConfig", + "RescheduleMaintenanceRequest", + "TlsCertificate", + "UpdateInstanceRequest", + "UpgradeInstanceRequest", + "WeeklyMaintenanceWindow", + "ZoneMetadata", ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py index c5182e2f8447..93d073abc00d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.cloud.redis_v1 import gapic_version as package_version +import sys import google.api_core as api_core -import sys +from google.cloud.redis_v1 import gapic_version as package_version __version__ = package_version.__version__ @@ -28,64 +28,70 @@ import importlib_metadata as metadata -from .services.cloud_redis import CloudRedisClient -from .services.cloud_redis import CloudRedisAsyncClient - -from .types.cloud_redis import CreateInstanceRequest -from .types.cloud_redis import DeleteInstanceRequest -from .types.cloud_redis import ExportInstanceRequest -from .types.cloud_redis import FailoverInstanceRequest -from .types.cloud_redis import GcsDestination -from .types.cloud_redis import GcsSource -from .types.cloud_redis import GetInstanceAuthStringRequest -from .types.cloud_redis import GetInstanceRequest -from .types.cloud_redis import ImportInstanceRequest -from .types.cloud_redis import InputConfig -from .types.cloud_redis import Instance -from .types.cloud_redis import InstanceAuthString -from .types.cloud_redis import ListInstancesRequest -from .types.cloud_redis import ListInstancesResponse -from .types.cloud_redis import LocationMetadata -from .types.cloud_redis import MaintenancePolicy -from .types.cloud_redis import MaintenanceSchedule -from .types.cloud_redis import NodeInfo -from .types.cloud_redis import OperationMetadata -from .types.cloud_redis import OutputConfig -from .types.cloud_redis import PersistenceConfig -from .types.cloud_redis import RescheduleMaintenanceRequest -from .types.cloud_redis import TlsCertificate -from .types.cloud_redis import UpdateInstanceRequest -from .types.cloud_redis import UpgradeInstanceRequest -from .types.cloud_redis import WeeklyMaintenanceWindow -from .types.cloud_redis import ZoneMetadata +from .services.cloud_redis import CloudRedisAsyncClient, CloudRedisClient +from .types.cloud_redis import ( + CreateInstanceRequest, + DeleteInstanceRequest, + ExportInstanceRequest, + FailoverInstanceRequest, + 
GcsDestination, + GcsSource, + GetInstanceAuthStringRequest, + GetInstanceRequest, + ImportInstanceRequest, + InputConfig, + Instance, + InstanceAuthString, + ListInstancesRequest, + ListInstancesResponse, + LocationMetadata, + MaintenancePolicy, + MaintenanceSchedule, + NodeInfo, + OperationMetadata, + OutputConfig, + PersistenceConfig, + RescheduleMaintenanceRequest, + TlsCertificate, + UpdateInstanceRequest, + UpgradeInstanceRequest, + WeeklyMaintenanceWindow, + ZoneMetadata, +) -if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER - api_core.check_python_version("google.cloud.redis_v1") # type: ignore - api_core.check_dependency_versions("google.cloud.redis_v1") # type: ignore -else: # pragma: NO COVER +if hasattr(api_core, "check_python_version") and hasattr( + api_core, "check_dependency_versions" +): # pragma: NO COVER + api_core.check_python_version("google.cloud.redis_v1") # type: ignore + api_core.check_dependency_versions("google.cloud.redis_v1") # type: ignore +else: # pragma: NO COVER # An older version of api_core is installed which does not define the # functions above. We do equivalent checks manually. try: - import warnings import sys + import warnings _py_version_str = sys.version.split()[0] _package_label = "google.cloud.redis_v1" if sys.version_info < (3, 9): - warnings.warn("You are using a non-supported Python version " + - f"({_py_version_str}). Google will not post any further " + - f"updates to {_package_label} supporting this Python version. " + - "Please upgrade to the latest Python version, or at " + - f"least to Python 3.9, and then update {_package_label}.", - FutureWarning) + warnings.warn( + "You are using a non-supported Python version " + + f"({_py_version_str}). Google will not post any further " + + f"updates to {_package_label} supporting this Python version. 
" + + "Please upgrade to the latest Python version, or at " + + f"least to Python 3.9, and then update {_package_label}.", + FutureWarning, + ) if sys.version_info[:2] == (3, 9): - warnings.warn(f"You are using a Python version ({_py_version_str}) " + - f"which Google will stop supporting in {_package_label} in " + - "January 2026. Please " + - "upgrade to the latest Python version, or at " + - "least to Python 3.10, before then, and " + - f"then update {_package_label}.", - FutureWarning) + warnings.warn( + f"You are using a Python version ({_py_version_str}) " + + f"which Google will stop supporting in {_package_label} in " + + "January 2026. Please " + + "upgrade to the latest Python version, or at " + + "least to Python 3.10, before then, and " + + f"then update {_package_label}.", + FutureWarning, + ) def parse_version_to_tuple(version_string: str): """Safely converts a semantic version string to a comparable tuple of integers. @@ -123,54 +129,58 @@ def _get_version(dependency_name): _recommendation = " (we recommend 6.x)" (_version_used, _version_used_string) = _get_version(_dependency_package) if _version_used and _version_used < _next_supported_version_tuple: - warnings.warn(f"Package {_package_label} depends on " + - f"{_dependency_package}, currently installed at version " + - f"{_version_used_string}. Future updates to " + - f"{_package_label} will require {_dependency_package} at " + - f"version {_next_supported_version} or higher{_recommendation}." 
+ - " Please ensure " + - "that either (a) your Python environment doesn't pin the " + - f"version of {_dependency_package}, so that updates to " + - f"{_package_label} can require the higher version, or " + - "(b) you manually update your Python environment to use at " + - f"least version {_next_supported_version} of " + - f"{_dependency_package}.", - FutureWarning) + warnings.warn( + f"Package {_package_label} depends on " + + f"{_dependency_package}, currently installed at version " + + f"{_version_used_string}. Future updates to " + + f"{_package_label} will require {_dependency_package} at " + + f"version {_next_supported_version} or higher{_recommendation}." + + " Please ensure " + + "that either (a) your Python environment doesn't pin the " + + f"version of {_dependency_package}, so that updates to " + + f"{_package_label} can require the higher version, or " + + "(b) you manually update your Python environment to use at " + + f"least version {_next_supported_version} of " + + f"{_dependency_package}.", + FutureWarning, + ) except Exception: - warnings.warn("Could not determine the version of Python " + - "currently being used. To continue receiving " + - "updates for {_package_label}, ensure you are " + - "using a supported version of Python; see " + - "https://devguide.python.org/versions/") + warnings.warn( + "Could not determine the version of Python " + + "currently being used. 
To continue receiving " + + "updates for {_package_label}, ensure you are " + + "using a supported version of Python; see " + + "https://devguide.python.org/versions/" + ) __all__ = ( - 'CloudRedisAsyncClient', -'CloudRedisClient', -'CreateInstanceRequest', -'DeleteInstanceRequest', -'ExportInstanceRequest', -'FailoverInstanceRequest', -'GcsDestination', -'GcsSource', -'GetInstanceAuthStringRequest', -'GetInstanceRequest', -'ImportInstanceRequest', -'InputConfig', -'Instance', -'InstanceAuthString', -'ListInstancesRequest', -'ListInstancesResponse', -'LocationMetadata', -'MaintenancePolicy', -'MaintenanceSchedule', -'NodeInfo', -'OperationMetadata', -'OutputConfig', -'PersistenceConfig', -'RescheduleMaintenanceRequest', -'TlsCertificate', -'UpdateInstanceRequest', -'UpgradeInstanceRequest', -'WeeklyMaintenanceWindow', -'ZoneMetadata', + "CloudRedisAsyncClient", + "CloudRedisClient", + "CreateInstanceRequest", + "DeleteInstanceRequest", + "ExportInstanceRequest", + "FailoverInstanceRequest", + "GcsDestination", + "GcsSource", + "GetInstanceAuthStringRequest", + "GetInstanceRequest", + "ImportInstanceRequest", + "InputConfig", + "Instance", + "InstanceAuthString", + "ListInstancesRequest", + "ListInstancesResponse", + "LocationMetadata", + "MaintenancePolicy", + "MaintenanceSchedule", + "NodeInfo", + "OperationMetadata", + "OutputConfig", + "PersistenceConfig", + "RescheduleMaintenanceRequest", + "TlsCertificate", + "UpdateInstanceRequest", + "UpgradeInstanceRequest", + "WeeklyMaintenanceWindow", + "ZoneMetadata", ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py index f302df64f13f..b2a40b86386e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py +++ 
b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from .client import CloudRedisClient from .async_client import CloudRedisAsyncClient +from .client import CloudRedisClient __all__ = ( - 'CloudRedisClient', - 'CloudRedisAsyncClient', + "CloudRedisClient", + "CloudRedisAsyncClient", ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index ed05908d0ca6..e925fb02f4a6 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -14,47 +14,59 @@ # limitations under the License. 
# import logging as std_logging -from collections import OrderedDict import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.redis_v1 import gapic_version as package_version +from collections import OrderedDict +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) -from google.api_core.client_options import ClientOptions +import google.protobuf from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.redis_v1 import gapic_version as package_version +from google.oauth2 import service_account # type: ignore try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.cloud.redis_v1.services.cloud_redis import pagers -from google.cloud.redis_v1.types import cloud_redis -from google.longrunning import operations_pb2 # type: ignore import google.api_core.operation as operation # type: ignore import google.api_core.operation_async as operation_async # type: ignore import google.protobuf.empty_pb2 as empty_pb2 # type: ignore import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore -from .transports.base import CloudRedisTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import CloudRedisGrpcAsyncIOTransport 
+from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.redis_v1.services.cloud_redis import pagers +from google.cloud.redis_v1.types import cloud_redis +from google.longrunning import operations_pb2 # type: ignore + from .client import CloudRedisClient +from .transports.base import DEFAULT_CLIENT_INFO, CloudRedisTransport +from .transports.grpc_asyncio import CloudRedisGrpcAsyncIOTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) + class CloudRedisAsyncClient: """Configures and manages Cloud Memorystore for Redis instances @@ -90,16 +102,24 @@ class CloudRedisAsyncClient: instance_path = staticmethod(CloudRedisClient.instance_path) parse_instance_path = staticmethod(CloudRedisClient.parse_instance_path) - common_billing_account_path = staticmethod(CloudRedisClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(CloudRedisClient.parse_common_billing_account_path) + common_billing_account_path = staticmethod( + CloudRedisClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + CloudRedisClient.parse_common_billing_account_path + ) common_folder_path = staticmethod(CloudRedisClient.common_folder_path) parse_common_folder_path = staticmethod(CloudRedisClient.parse_common_folder_path) common_organization_path = staticmethod(CloudRedisClient.common_organization_path) - parse_common_organization_path = staticmethod(CloudRedisClient.parse_common_organization_path) + parse_common_organization_path = staticmethod( + CloudRedisClient.parse_common_organization_path + ) common_project_path = staticmethod(CloudRedisClient.common_project_path) parse_common_project_path = staticmethod(CloudRedisClient.parse_common_project_path) common_location_path = staticmethod(CloudRedisClient.common_location_path) 
- parse_common_location_path = staticmethod(CloudRedisClient.parse_common_location_path) + parse_common_location_path = staticmethod( + CloudRedisClient.parse_common_location_path + ) @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): @@ -141,7 +161,9 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): """Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -204,12 +226,16 @@ def universe_domain(self) -> str: get_transport_class = CloudRedisClient.get_transport_class - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, CloudRedisTransport, Callable[..., CloudRedisTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, CloudRedisTransport, Callable[..., CloudRedisTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the cloud redis async client. 
Args: @@ -267,31 +293,39 @@ def __init__(self, *, transport=transport, client_options=client_options, client_info=client_info, - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.redis_v1.CloudRedisAsyncClient`.", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { "serviceName": "google.cloud.redis.v1.CloudRedis", "credentialsType": None, - } + }, ) - async def list_instances(self, - request: Optional[Union[cloud_redis.ListInstancesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListInstancesAsyncPager: + async def list_instances( + self, + request: Optional[Union[cloud_redis.ListInstancesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListInstancesAsyncPager: r"""Lists all Redis instances owned by a project in either the specified location 
(region) or all locations. @@ -364,10 +398,14 @@ async def sample_list_instances(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -381,14 +419,14 @@ async def sample_list_instances(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_instances] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_instances + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -416,14 +454,15 @@ async def sample_list_instances(): # Done; return the response. 
return response - async def get_instance(self, - request: Optional[Union[cloud_redis.GetInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> cloud_redis.Instance: + async def get_instance( + self, + request: Optional[Union[cloud_redis.GetInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.Instance: r"""Gets the details of a specific Redis instance. .. code-block:: python @@ -480,10 +519,14 @@ async def sample_get_instance(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -497,14 +540,14 @@ async def sample_get_instance(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_instance] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_instance + ] # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -521,14 +564,15 @@ async def sample_get_instance(): # Done; return the response. return response - async def get_instance_auth_string(self, - request: Optional[Union[cloud_redis.GetInstanceAuthStringRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> cloud_redis.InstanceAuthString: + async def get_instance_auth_string( + self, + request: Optional[Union[cloud_redis.GetInstanceAuthStringRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.InstanceAuthString: r"""Gets the AUTH string for a Redis instance. If AUTH is not enabled for the instance the response will be empty. This information is not included in the details returned @@ -588,10 +632,14 @@ async def sample_get_instance_auth_string(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -605,14 +653,14 @@ async def sample_get_instance_auth_string(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_instance_auth_string] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_instance_auth_string + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -629,16 +677,17 @@ async def sample_get_instance_auth_string(): # Done; return the response. return response - async def create_instance(self, - request: Optional[Union[cloud_redis.CreateInstanceRequest, dict]] = None, - *, - parent: Optional[str] = None, - instance_id: Optional[str] = None, - instance: Optional[cloud_redis.Instance] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def create_instance( + self, + request: Optional[Union[cloud_redis.CreateInstanceRequest, dict]] = None, + *, + parent: Optional[str] = None, + instance_id: Optional[str] = None, + instance: Optional[cloud_redis.Instance] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Creates a Redis instance based on the specified tier and memory size. 
@@ -744,10 +793,14 @@ async def sample_create_instance(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent, instance_id, instance] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -765,14 +818,14 @@ async def sample_create_instance(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_instance] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_instance + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -797,15 +850,16 @@ async def sample_create_instance(): # Done; return the response. 
return response - async def update_instance(self, - request: Optional[Union[cloud_redis.UpdateInstanceRequest, dict]] = None, - *, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - instance: Optional[cloud_redis.Instance] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def update_instance( + self, + request: Optional[Union[cloud_redis.UpdateInstanceRequest, dict]] = None, + *, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + instance: Optional[cloud_redis.Instance] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Updates the metadata and configuration of a specific Redis instance. Completed longrunning.Operation will contain the new @@ -895,10 +949,14 @@ async def sample_update_instance(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [update_mask, instance] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -914,14 +972,16 @@ async def sample_update_instance(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_instance] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_instance + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("instance.name", request.instance.name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("instance.name", request.instance.name),) + ), ) # Validate the universe domain. @@ -946,15 +1006,16 @@ async def sample_update_instance(): # Done; return the response. return response - async def upgrade_instance(self, - request: Optional[Union[cloud_redis.UpgradeInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - redis_version: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def upgrade_instance( + self, + request: Optional[Union[cloud_redis.UpgradeInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + redis_version: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Upgrades Redis instance to the newer Redis version specified in the request. @@ -1029,10 +1090,14 @@ async def sample_upgrade_instance(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [name, redis_version] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1048,14 +1113,14 @@ async def sample_upgrade_instance(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.upgrade_instance] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.upgrade_instance + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1080,15 +1145,16 @@ async def sample_upgrade_instance(): # Done; return the response. 
return response - async def import_instance(self, - request: Optional[Union[cloud_redis.ImportInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - input_config: Optional[cloud_redis.InputConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def import_instance( + self, + request: Optional[Union[cloud_redis.ImportInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + input_config: Optional[cloud_redis.InputConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Import a Redis RDB snapshot file from Cloud Storage into a Redis instance. Redis may stop serving during this operation. Instance @@ -1173,10 +1239,14 @@ async def sample_import_instance(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name, input_config] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1192,14 +1262,14 @@ async def sample_import_instance(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.import_instance] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.import_instance + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1224,15 +1294,16 @@ async def sample_import_instance(): # Done; return the response. return response - async def export_instance(self, - request: Optional[Union[cloud_redis.ExportInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - output_config: Optional[cloud_redis.OutputConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def export_instance( + self, + request: Optional[Union[cloud_redis.ExportInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + output_config: Optional[cloud_redis.OutputConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Export Redis instance data into a Redis RDB format file in Cloud Storage. Redis will continue serving during this operation. @@ -1314,10 +1385,14 @@ async def sample_export_instance(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [name, output_config] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1333,14 +1408,14 @@ async def sample_export_instance(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.export_instance] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.export_instance + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1365,15 +1440,18 @@ async def sample_export_instance(): # Done; return the response. 
return response - async def failover_instance(self, - request: Optional[Union[cloud_redis.FailoverInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - data_protection_mode: Optional[cloud_redis.FailoverInstanceRequest.DataProtectionMode] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def failover_instance( + self, + request: Optional[Union[cloud_redis.FailoverInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + data_protection_mode: Optional[ + cloud_redis.FailoverInstanceRequest.DataProtectionMode + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Initiates a failover of the primary node to current replica node for a specific STANDARD tier Cloud Memorystore for Redis instance. @@ -1449,10 +1527,14 @@ async def sample_failover_instance(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name, data_protection_mode] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -1468,14 +1550,14 @@ async def sample_failover_instance(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.failover_instance] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.failover_instance + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1500,14 +1582,15 @@ async def sample_failover_instance(): # Done; return the response. return response - async def delete_instance(self, - request: Optional[Union[cloud_redis.DeleteInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def delete_instance( + self, + request: Optional[Union[cloud_redis.DeleteInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Deletes a specific Redis instance. Instance stops serving and data is deleted. @@ -1581,10 +1664,14 @@ async def sample_delete_instance(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1598,14 +1685,14 @@ async def sample_delete_instance(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_instance] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_instance + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1630,16 +1717,19 @@ async def sample_delete_instance(): # Done; return the response. 
return response - async def reschedule_maintenance(self, - request: Optional[Union[cloud_redis.RescheduleMaintenanceRequest, dict]] = None, - *, - name: Optional[str] = None, - reschedule_type: Optional[cloud_redis.RescheduleMaintenanceRequest.RescheduleType] = None, - schedule_time: Optional[timestamp_pb2.Timestamp] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def reschedule_maintenance( + self, + request: Optional[Union[cloud_redis.RescheduleMaintenanceRequest, dict]] = None, + *, + name: Optional[str] = None, + reschedule_type: Optional[ + cloud_redis.RescheduleMaintenanceRequest.RescheduleType + ] = None, + schedule_time: Optional[timestamp_pb2.Timestamp] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Reschedule maintenance for a given instance in a given project and location. @@ -1722,10 +1812,14 @@ async def sample_reschedule_maintenance(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name, reschedule_type, schedule_time] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1743,14 +1837,14 @@ async def sample_reschedule_maintenance(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.reschedule_maintenance] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.reschedule_maintenance + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1817,8 +1911,7 @@ async def list_operations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -1826,7 +1919,11 @@ async def list_operations( # Send the request. response = await rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1873,8 +1970,7 @@ async def get_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -1882,7 +1978,11 @@ async def get_operation( # Send the request. 
response = await rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1933,15 +2033,19 @@ async def delete_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - await rpc(request_pb, retry=retry, timeout=timeout, metadata=metadata,) + await rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) async def cancel_operation( self, @@ -1988,15 +2092,19 @@ async def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - await rpc(request_pb, retry=retry, timeout=timeout, metadata=metadata,) + await rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) async def wait_operation( self, @@ -2046,8 +2154,7 @@ async def wait_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -2055,7 +2162,11 @@ async def wait_operation( # Send the request. 
response = await rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2102,8 +2213,7 @@ async def get_location( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -2111,7 +2221,11 @@ async def get_location( # Send the request. response = await rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2158,8 +2272,7 @@ async def list_locations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -2167,7 +2280,11 @@ async def list_locations( # Send the request. response = await rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -2178,12 +2295,13 @@ async def __aenter__(self) -> "CloudRedisAsyncClient": async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "CloudRedisAsyncClient", -) +__all__ = ("CloudRedisAsyncClient",) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index e03925e7f332..fe32360ff603 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -13,27 +13,38 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from collections import OrderedDict -from http import HTTPStatus import json import logging as std_logging import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast import warnings +from collections import OrderedDict +from http import HTTPStatus +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) -from google.cloud.redis_v1 import gapic_version as package_version - +import google.protobuf from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.redis_v1 import gapic_version as package_version +from google.oauth2 import service_account # type: ignore try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -42,29 +53,33 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) -from google.cloud.location import locations_pb2 # type: ignore -from google.cloud.redis_v1.services.cloud_redis import pagers -from 
google.cloud.redis_v1.types import cloud_redis -from google.longrunning import operations_pb2 # type: ignore import google.api_core.operation as operation # type: ignore import google.api_core.operation_async as operation_async # type: ignore import google.protobuf.empty_pb2 as empty_pb2 # type: ignore import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore -from .transports.base import CloudRedisTransport, DEFAULT_CLIENT_INFO +from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.redis_v1.services.cloud_redis import pagers +from google.cloud.redis_v1.types import cloud_redis +from google.longrunning import operations_pb2 # type: ignore + +from .transports.base import DEFAULT_CLIENT_INFO, CloudRedisTransport from .transports.grpc import CloudRedisGrpcTransport from .transports.grpc_asyncio import CloudRedisGrpcAsyncIOTransport from .transports.rest import CloudRedisRestTransport + try: from .transports.rest_asyncio import AsyncCloudRedisRestTransport + HAS_ASYNC_REST_DEPENDENCIES = True -except ImportError as e: # pragma: NO COVER +except ImportError as e: # pragma: NO COVER HAS_ASYNC_REST_DEPENDENCIES = False ASYNC_REST_EXCEPTION = e @@ -76,6 +91,7 @@ class CloudRedisClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. 
""" + _transport_registry = OrderedDict() # type: Dict[str, Type[CloudRedisTransport]] _transport_registry["grpc"] = CloudRedisGrpcTransport _transport_registry["grpc_asyncio"] = CloudRedisGrpcAsyncIOTransport @@ -83,9 +99,10 @@ class CloudRedisClientMeta(type): if HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER _transport_registry["rest_asyncio"] = AsyncCloudRedisRestTransport - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[CloudRedisTransport]: + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[CloudRedisTransport]: """Returns an appropriate transport class. Args: @@ -96,7 +113,9 @@ def get_transport_class(cls, The transport class to use. """ # If a specific transport is requested, return that one. - if label == "rest_asyncio" and not HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER + if ( + label == "rest_asyncio" and not HAS_ASYNC_REST_DEPENDENCIES + ): # pragma: NO COVER raise ASYNC_REST_EXCEPTION if label: return cls._transport_registry[label] @@ -184,14 +203,16 @@ def _use_client_cert_effective(): bool: whether client certificate should be used for mTLS Raises: ValueError: (If using a version of google-auth without should_use_client_cert and - GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) 
""" # check if google-auth version supports should_use_client_cert for automatic mTLS enablement if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER return mtls.should_use_client_cert() - else: # pragma: NO COVER + else: # pragma: NO COVER # if unsupported, fallback to reading from env var - use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert_str = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() if use_client_cert_str not in ("true", "false"): raise ValueError( "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" @@ -230,8 +251,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: CloudRedisClient: The constructed client. """ - credentials = service_account.Credentials.from_service_account_file( - filename) + credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials return cls(*args, **kwargs) @@ -248,73 +268,108 @@ def transport(self) -> CloudRedisTransport: return self._transport @staticmethod - def instance_path(project: str,location: str,instance: str,) -> str: + def instance_path( + project: str, + location: str, + instance: str, + ) -> str: """Returns a fully-qualified instance string.""" - return "projects/{project}/locations/{location}/instances/{instance}".format(project=project, location=location, instance=instance, ) + return "projects/{project}/locations/{location}/instances/{instance}".format( + project=project, + location=location, + instance=instance, + ) @staticmethod - def parse_instance_path(path: str) -> Dict[str,str]: + def parse_instance_path(path: str) -> Dict[str, str]: """Parses a instance path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def 
common_billing_account_path(billing_account: str, ) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: + def parse_common_billing_account_path(path: str) -> Dict[str, str]: """Parse a billing_account path into its component segments.""" m = re.match(r"^billingAccounts/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str, ) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: + def parse_common_folder_path(path: str) -> Dict[str, str]: """Parse a folder path into its component segments.""" m = re.match(r"^folders/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str, ) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: + def parse_common_organization_path(path: str) -> Dict[str, str]: """Parse a organization path into its component segments.""" m = re.match(r"^organizations/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str, ) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project 
string.""" - return "projects/{project}".format(project=project, ) + return "projects/{project}".format( + project=project, + ) @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: + def parse_common_project_path(path: str) -> Dict[str, str]: """Parse a project path into its component segments.""" m = re.match(r"^projects/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str, ) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: + def parse_common_location_path(path: str) -> Dict[str, str]: """Parse a location path into its component segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -346,14 +401,18 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = CloudRedisClient._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Figure out the client cert source to use. client_cert_source = None @@ -366,7 +425,9 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio # Figure out which api endpoint to use. if client_options.api_endpoint is not None: api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): api_endpoint = cls.DEFAULT_MTLS_ENDPOINT else: api_endpoint = cls.DEFAULT_ENDPOINT @@ -391,7 +452,9 @@ def _read_environment_variables(): use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) return use_client_cert, use_mtls_endpoint, universe_domain_env @staticmethod @@ -414,7 +477,9 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): return client_cert_source @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint) -> str: + def _get_api_endpoint( + 
api_override, client_cert_source, universe_domain, use_mtls_endpoint + ) -> str: """Return the API endpoint used by the client. Args: @@ -430,17 +495,25 @@ def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtl """ if api_override is not None: api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): _default_universe = CloudRedisClient._DEFAULT_UNIVERSE if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) api_endpoint = CloudRedisClient.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + api_endpoint = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) return api_endpoint @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: """Return the universe domain used by the client. Args: @@ -476,15 +549,18 @@ def _validate_universe_domain(self): return True def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError + self, error: core_exceptions.GoogleAPICallError ) -> None: """Adds credential info string to error details for 401/403/404 errors. Args: error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
""" - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: return cred = self._transport._credentials @@ -517,12 +593,16 @@ def universe_domain(self) -> str: """ return self._universe_domain - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, CloudRedisTransport, Callable[..., CloudRedisTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, CloudRedisTransport, Callable[..., CloudRedisTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the cloud redis client. 
Args: @@ -580,13 +660,21 @@ def __init__(self, *, self._client_options = client_options_lib.from_dict(self._client_options) if self._client_options is None: self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = CloudRedisClient._read_environment_variables() - self._client_cert_source = CloudRedisClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = CloudRedisClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ( + CloudRedisClient._read_environment_variables() + ) + self._client_cert_source = CloudRedisClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = CloudRedisClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) self._api_endpoint: str = "" # updated below, depending on `transport` # Initialize the universe domain validation. @@ -598,7 +686,9 @@ def __init__(self, *, api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport @@ -607,25 +697,28 @@ def __init__(self, *, if transport_provided: # transport is a CloudRedisTransport instance. 
if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) if self._client_options.scopes: raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." + "When providing a transport instance, provide its scopes directly." ) self._transport = cast(CloudRedisTransport, transport) self._api_endpoint = self._transport.host - self._api_endpoint = (self._api_endpoint or - CloudRedisClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) + self._api_endpoint = self._api_endpoint or CloudRedisClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) if not transport_provided: - transport_init: Union[Type[CloudRedisTransport], Callable[..., CloudRedisTransport]] = ( + transport_init: Union[ + Type[CloudRedisTransport], Callable[..., CloudRedisTransport] + ] = ( CloudRedisClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudRedisTransport], transport) @@ -638,9 +731,12 @@ def __init__(self, *, "google.api_core.client_options.ClientOptions.quota_project_id": self._client_options.quota_project_id, "google.api_core.client_options.ClientOptions.client_cert_source": self._client_options.client_cert_source, "google.api_core.client_options.ClientOptions.api_audience": self._client_options.api_audience, - } - provided_unsupported_params = [name for name, value in unsupported_params.items() if value is not None] + provided_unsupported_params = [ + name + for name, value in unsupported_params.items() + if value is not None + ] if provided_unsupported_params: raise 
core_exceptions.AsyncRestUnsupportedParameterError( # type: ignore f"The following provided parameters are not supported for `transport=rest_asyncio`: {', '.join(provided_unsupported_params)}" @@ -654,8 +750,12 @@ def __init__(self, *, import google.auth._default # type: ignore - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) # initialize with the provided callable or the passed in class self._transport = transport_init( @@ -671,28 +771,37 @@ def __init__(self, *, ) if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.redis_v1.CloudRedisClient`.", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { "serviceName": "google.cloud.redis.v1.CloudRedis", "credentialsType": None, - } + }, ) - def list_instances(self, - request: Optional[Union[cloud_redis.ListInstancesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = 
gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListInstancesPager: + def list_instances( + self, + request: Optional[Union[cloud_redis.ListInstancesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListInstancesPager: r"""Lists all Redis instances owned by a project in either the specified location (region) or all locations. @@ -765,10 +874,14 @@ def sample_list_instances(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -786,9 +899,7 @@ def sample_list_instances(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -816,14 +927,15 @@ def sample_list_instances(): # Done; return the response. 
return response - def get_instance(self, - request: Optional[Union[cloud_redis.GetInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> cloud_redis.Instance: + def get_instance( + self, + request: Optional[Union[cloud_redis.GetInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.Instance: r"""Gets the details of a specific Redis instance. .. code-block:: python @@ -880,10 +992,14 @@ def sample_get_instance(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -901,9 +1017,7 @@ def sample_get_instance(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -920,14 +1034,15 @@ def sample_get_instance(): # Done; return the response. 
return response - def get_instance_auth_string(self, - request: Optional[Union[cloud_redis.GetInstanceAuthStringRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> cloud_redis.InstanceAuthString: + def get_instance_auth_string( + self, + request: Optional[Union[cloud_redis.GetInstanceAuthStringRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.InstanceAuthString: r"""Gets the AUTH string for a Redis instance. If AUTH is not enabled for the instance the response will be empty. This information is not included in the details returned @@ -987,10 +1102,14 @@ def sample_get_instance_auth_string(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1008,9 +1127,7 @@ def sample_get_instance_auth_string(): # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1027,16 +1144,17 @@ def sample_get_instance_auth_string(): # Done; return the response. return response - def create_instance(self, - request: Optional[Union[cloud_redis.CreateInstanceRequest, dict]] = None, - *, - parent: Optional[str] = None, - instance_id: Optional[str] = None, - instance: Optional[cloud_redis.Instance] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def create_instance( + self, + request: Optional[Union[cloud_redis.CreateInstanceRequest, dict]] = None, + *, + parent: Optional[str] = None, + instance_id: Optional[str] = None, + instance: Optional[cloud_redis.Instance] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Creates a Redis instance based on the specified tier and memory size. @@ -1142,10 +1260,14 @@ def sample_create_instance(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent, instance_id, instance] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1167,9 +1289,7 @@ def sample_create_instance(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1194,15 +1314,16 @@ def sample_create_instance(): # Done; return the response. return response - def update_instance(self, - request: Optional[Union[cloud_redis.UpdateInstanceRequest, dict]] = None, - *, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - instance: Optional[cloud_redis.Instance] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def update_instance( + self, + request: Optional[Union[cloud_redis.UpdateInstanceRequest, dict]] = None, + *, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + instance: Optional[cloud_redis.Instance] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Updates the metadata and configuration of a specific Redis instance. Completed longrunning.Operation will contain the new @@ -1292,10 +1413,14 @@ def sample_update_instance(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [update_mask, instance] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1315,9 +1440,9 @@ def sample_update_instance(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("instance.name", request.instance.name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("instance.name", request.instance.name),) + ), ) # Validate the universe domain. @@ -1342,15 +1467,16 @@ def sample_update_instance(): # Done; return the response. return response - def upgrade_instance(self, - request: Optional[Union[cloud_redis.UpgradeInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - redis_version: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def upgrade_instance( + self, + request: Optional[Union[cloud_redis.UpgradeInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + redis_version: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Upgrades Redis instance to the newer Redis version specified in the request. 
@@ -1425,10 +1551,14 @@ def sample_upgrade_instance(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name, redis_version] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1448,9 +1578,7 @@ def sample_upgrade_instance(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1475,15 +1603,16 @@ def sample_upgrade_instance(): # Done; return the response. 
return response - def import_instance(self, - request: Optional[Union[cloud_redis.ImportInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - input_config: Optional[cloud_redis.InputConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def import_instance( + self, + request: Optional[Union[cloud_redis.ImportInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + input_config: Optional[cloud_redis.InputConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Import a Redis RDB snapshot file from Cloud Storage into a Redis instance. Redis may stop serving during this operation. Instance @@ -1568,10 +1697,14 @@ def sample_import_instance(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name, input_config] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1591,9 +1724,7 @@ def sample_import_instance(): # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1618,15 +1749,16 @@ def sample_import_instance(): # Done; return the response. return response - def export_instance(self, - request: Optional[Union[cloud_redis.ExportInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - output_config: Optional[cloud_redis.OutputConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def export_instance( + self, + request: Optional[Union[cloud_redis.ExportInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + output_config: Optional[cloud_redis.OutputConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Export Redis instance data into a Redis RDB format file in Cloud Storage. Redis will continue serving during this operation. @@ -1708,10 +1840,14 @@ def sample_export_instance(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name, output_config] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1731,9 +1867,7 @@ def sample_export_instance(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1758,15 +1892,18 @@ def sample_export_instance(): # Done; return the response. return response - def failover_instance(self, - request: Optional[Union[cloud_redis.FailoverInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - data_protection_mode: Optional[cloud_redis.FailoverInstanceRequest.DataProtectionMode] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def failover_instance( + self, + request: Optional[Union[cloud_redis.FailoverInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + data_protection_mode: Optional[ + cloud_redis.FailoverInstanceRequest.DataProtectionMode + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Initiates a failover of the primary node to current replica node for a specific STANDARD tier Cloud Memorystore for Redis instance. @@ -1842,10 +1979,14 @@ def sample_failover_instance(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [name, data_protection_mode] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1865,9 +2006,7 @@ def sample_failover_instance(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1892,14 +2031,15 @@ def sample_failover_instance(): # Done; return the response. return response - def delete_instance(self, - request: Optional[Union[cloud_redis.DeleteInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def delete_instance( + self, + request: Optional[Union[cloud_redis.DeleteInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Deletes a specific Redis instance. Instance stops serving and data is deleted. 
@@ -1973,10 +2113,14 @@ def sample_delete_instance(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1994,9 +2138,7 @@ def sample_delete_instance(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2021,16 +2163,19 @@ def sample_delete_instance(): # Done; return the response. 
return response - def reschedule_maintenance(self, - request: Optional[Union[cloud_redis.RescheduleMaintenanceRequest, dict]] = None, - *, - name: Optional[str] = None, - reschedule_type: Optional[cloud_redis.RescheduleMaintenanceRequest.RescheduleType] = None, - schedule_time: Optional[timestamp_pb2.Timestamp] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def reschedule_maintenance( + self, + request: Optional[Union[cloud_redis.RescheduleMaintenanceRequest, dict]] = None, + *, + name: Optional[str] = None, + reschedule_type: Optional[ + cloud_redis.RescheduleMaintenanceRequest.RescheduleType + ] = None, + schedule_time: Optional[timestamp_pb2.Timestamp] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Reschedule maintenance for a given instance in a given project and location. @@ -2113,10 +2258,14 @@ def sample_reschedule_maintenance(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name, reschedule_type, schedule_time] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -2138,9 +2287,7 @@ def sample_reschedule_maintenance(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -2220,8 +2367,7 @@ def list_operations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -2230,7 +2376,11 @@ def list_operations( try: # Send the request. response = rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2280,8 +2430,7 @@ def get_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -2290,7 +2439,11 @@ def get_operation( try: # Send the request. response = rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2344,15 +2497,19 @@ def delete_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. 
- rpc(request_pb, retry=retry, timeout=timeout, metadata=metadata,) + rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) def cancel_operation( self, @@ -2399,15 +2556,19 @@ def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc(request_pb, retry=retry, timeout=timeout, metadata=metadata,) + rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) def wait_operation( self, @@ -2457,8 +2618,7 @@ def wait_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -2467,7 +2627,11 @@ def wait_operation( try: # Send the request. response = rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2517,8 +2681,7 @@ def get_location( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -2527,7 +2690,11 @@ def get_location( try: # Send the request. response = rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -2577,8 +2744,7 @@ def list_locations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -2587,7 +2753,11 @@ def list_locations( try: # Send the request. response = rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2596,11 +2766,11 @@ def list_locations( raise e -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "CloudRedisClient", -) +__all__ = ("CloudRedisClient",) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py index 4e0e4cb32343..10b0f261891a 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py @@ -13,13 +13,27 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + from google.api_core import gapic_v1 from google.api_core import retry as retries from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore @@ -44,14 +58,17 @@ class ListInstancesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., cloud_redis.ListInstancesResponse], - request: cloud_redis.ListInstancesRequest, - response: cloud_redis.ListInstancesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., cloud_redis.ListInstancesResponse], + request: cloud_redis.ListInstancesRequest, + response: cloud_redis.ListInstancesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. 
Args: @@ -84,7 +101,12 @@ def pages(self) -> Iterator[cloud_redis.ListInstancesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[cloud_redis.Instance]: @@ -92,7 +114,7 @@ def __iter__(self) -> Iterator[cloud_redis.Instance]: yield from page.instances def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListInstancesAsyncPager: @@ -112,14 +134,17 @@ class ListInstancesAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[cloud_redis.ListInstancesResponse]], - request: cloud_redis.ListInstancesRequest, - response: cloud_redis.ListInstancesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[cloud_redis.ListInstancesResponse]], + request: cloud_redis.ListInstancesRequest, + response: cloud_redis.ListInstancesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. 
Args: @@ -152,8 +177,14 @@ async def pages(self) -> AsyncIterator[cloud_redis.ListInstancesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[cloud_redis.Instance]: async def async_generator(): async for page in self.pages: @@ -163,4 +194,4 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py index 1cbbf54c2525..c529d7ba2173 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py @@ -14,18 +14,24 @@ # limitations under the License. # from collections import OrderedDict -from typing import Dict, Type, Tuple +from typing import Dict, Tuple, Type from .base import CloudRedisTransport from .grpc import CloudRedisGrpcTransport from .grpc_asyncio import CloudRedisGrpcAsyncIOTransport -from .rest import CloudRedisRestTransport -from .rest import CloudRedisRestInterceptor +from .rest import CloudRedisRestInterceptor, CloudRedisRestTransport + ASYNC_REST_CLASSES: Tuple[str, ...] 
try: - from .rest_asyncio import AsyncCloudRedisRestTransport - from .rest_asyncio import AsyncCloudRedisRestInterceptor - ASYNC_REST_CLASSES = ('AsyncCloudRedisRestTransport', 'AsyncCloudRedisRestInterceptor') + from .rest_asyncio import ( + AsyncCloudRedisRestInterceptor, + AsyncCloudRedisRestTransport, + ) + + ASYNC_REST_CLASSES = ( + "AsyncCloudRedisRestTransport", + "AsyncCloudRedisRestInterceptor", + ) HAS_REST_ASYNC = True except ImportError: # pragma: NO COVER ASYNC_REST_CLASSES = () @@ -34,16 +40,16 @@ # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[CloudRedisTransport]] -_transport_registry['grpc'] = CloudRedisGrpcTransport -_transport_registry['grpc_asyncio'] = CloudRedisGrpcAsyncIOTransport -_transport_registry['rest'] = CloudRedisRestTransport +_transport_registry["grpc"] = CloudRedisGrpcTransport +_transport_registry["grpc_asyncio"] = CloudRedisGrpcAsyncIOTransport +_transport_registry["rest"] = CloudRedisRestTransport if HAS_REST_ASYNC: # pragma: NO COVER - _transport_registry['rest_asyncio'] = AsyncCloudRedisRestTransport + _transport_registry["rest_asyncio"] = AsyncCloudRedisRestTransport __all__ = ( - 'CloudRedisTransport', - 'CloudRedisGrpcTransport', - 'CloudRedisGrpcAsyncIOTransport', - 'CloudRedisRestTransport', - 'CloudRedisRestInterceptor', + "CloudRedisTransport", + "CloudRedisGrpcTransport", + "CloudRedisGrpcAsyncIOTransport", + "CloudRedisRestTransport", + "CloudRedisRestInterceptor", ) + ASYNC_REST_CLASSES diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py index aef1646cde60..38f68afd8210 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ 
b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -16,23 +16,22 @@ import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -from google.cloud.redis_v1 import gapic_version as package_version - -import google.auth # type: ignore import google.api_core +import google.auth # type: ignore +import google.protobuf from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, operations_v1 from google.api_core import retry as retries -from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.redis_v1 import gapic_version as package_version from google.cloud.redis_v1.types import cloud_redis -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ @@ -41,24 +40,23 @@ class CloudRedisTransport(abc.ABC): """Abstract transport class for CloudRedis.""" - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - DEFAULT_HOST: str = 'redis.googleapis.com' + DEFAULT_HOST: str = "redis.googleapis.com" def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: 
Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: """Instantiate the transport. Args: @@ -97,31 +95,43 @@ def __init__( # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) if credentials_file is not None: credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - default_scopes=self.AUTH_SCOPES, - ) + credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(scopes=scopes, quota_project_id=quota_project_id, default_scopes=self.AUTH_SCOPES) + credentials, _ = google.auth.default( + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) # Don't apply audience if the credentials file passed from user. 
if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): credentials = credentials.with_always_use_jwt_access(True) # Save the credentials. self._credentials = credentials # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' + if ":" not in host: + host += ":443" self._host = host self._wrapped_methods: Dict[Callable, Callable] = {} @@ -223,14 +233,14 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - } + } def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
""" raise NotImplementedError() @@ -240,102 +250,107 @@ def operations_client(self): raise NotImplementedError() @property - def list_instances(self) -> Callable[ - [cloud_redis.ListInstancesRequest], - Union[ - cloud_redis.ListInstancesResponse, - Awaitable[cloud_redis.ListInstancesResponse] - ]]: + def list_instances( + self, + ) -> Callable[ + [cloud_redis.ListInstancesRequest], + Union[ + cloud_redis.ListInstancesResponse, + Awaitable[cloud_redis.ListInstancesResponse], + ], + ]: raise NotImplementedError() @property - def get_instance(self) -> Callable[ - [cloud_redis.GetInstanceRequest], - Union[ - cloud_redis.Instance, - Awaitable[cloud_redis.Instance] - ]]: + def get_instance( + self, + ) -> Callable[ + [cloud_redis.GetInstanceRequest], + Union[cloud_redis.Instance, Awaitable[cloud_redis.Instance]], + ]: raise NotImplementedError() @property - def get_instance_auth_string(self) -> Callable[ - [cloud_redis.GetInstanceAuthStringRequest], - Union[ - cloud_redis.InstanceAuthString, - Awaitable[cloud_redis.InstanceAuthString] - ]]: + def get_instance_auth_string( + self, + ) -> Callable[ + [cloud_redis.GetInstanceAuthStringRequest], + Union[ + cloud_redis.InstanceAuthString, Awaitable[cloud_redis.InstanceAuthString] + ], + ]: raise NotImplementedError() @property - def create_instance(self) -> Callable[ - [cloud_redis.CreateInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def create_instance( + self, + ) -> Callable[ + [cloud_redis.CreateInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def update_instance(self) -> Callable[ - [cloud_redis.UpdateInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def update_instance( + self, + ) -> Callable[ + [cloud_redis.UpdateInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise 
NotImplementedError() @property - def upgrade_instance(self) -> Callable[ - [cloud_redis.UpgradeInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def upgrade_instance( + self, + ) -> Callable[ + [cloud_redis.UpgradeInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def import_instance(self) -> Callable[ - [cloud_redis.ImportInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def import_instance( + self, + ) -> Callable[ + [cloud_redis.ImportInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def export_instance(self) -> Callable[ - [cloud_redis.ExportInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def export_instance( + self, + ) -> Callable[ + [cloud_redis.ExportInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def failover_instance(self) -> Callable[ - [cloud_redis.FailoverInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def failover_instance( + self, + ) -> Callable[ + [cloud_redis.FailoverInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def delete_instance(self) -> Callable[ - [cloud_redis.DeleteInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def delete_instance( + self, + ) -> Callable[ + [cloud_redis.DeleteInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def reschedule_maintenance(self) -> Callable[ - [cloud_redis.RescheduleMaintenanceRequest], - Union[ - operations_pb2.Operation, - 
Awaitable[operations_pb2.Operation] - ]]: + def reschedule_maintenance( + self, + ) -> Callable[ + [cloud_redis.RescheduleMaintenanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property @@ -343,7 +358,10 @@ def list_operations( self, ) -> Callable[ [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], ]: raise NotImplementedError() @@ -384,7 +402,8 @@ def wait_operation( raise NotImplementedError() @property - def get_location(self, + def get_location( + self, ) -> Callable[ [locations_pb2.GetLocationRequest], Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], @@ -392,10 +411,14 @@ def get_location(self, raise NotImplementedError() @property - def list_locations(self, + def list_locations( + self, ) -> Callable[ [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], ]: raise NotImplementedError() @@ -404,6 +427,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ( - 'CloudRedisTransport', -) +__all__ = ("CloudRedisTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index a3f26f435bd5..34a42125c53c 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -19,25 +19,23 @@ import warnings from 
typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson +import google.auth # type: ignore import google.protobuf.message - import grpc # type: ignore import proto # type: ignore - -from google.cloud.location import locations_pb2 # type: ignore +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.types import cloud_redis -from google.longrunning import operations_pb2 # type: ignore -from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson + +from .base import DEFAULT_CLIENT_INFO, CloudRedisTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -47,7 +45,9 @@ class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) if logging_enabled: # pragma: NO COVER request_metadata = client_call_details.metadata if isinstance(request, proto.Message): @@ -68,7 +68,7 @@ def intercept_unary_unary(self, continuation, client_call_details, 
request): } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -79,7 +79,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) result = response.result() if isinstance(result, proto.Message): response_payload = type(result).to_json(result) @@ -94,7 +98,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Received response for {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": client_call_details.method, "response": grpc_response, @@ -136,23 +140,26 @@ class CloudRedisGrpcTransport(CloudRedisTransport): It sends protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. 
""" + _stubs: Dict[str, Callable] - def __init__(self, *, - host: str = 'redis.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "redis.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: @@ -280,19 +287,23 @@ def __init__(self, *, ) self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) # Wrap messages. 
This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod - def create_channel(cls, - host: str = 'redis.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: + def create_channel( + cls, + host: str = "redis.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: """Create and return a gRPC channel object. Args: host (Optional[str]): The host for the channel to use. @@ -328,13 +339,12 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property @@ -354,9 +364,11 @@ def operations_client(self) -> operations_v1.OperationsClient: return self._operations_client @property - def list_instances(self) -> Callable[ - [cloud_redis.ListInstancesRequest], - cloud_redis.ListInstancesResponse]: + def list_instances( + self, + ) -> Callable[ + [cloud_redis.ListInstancesRequest], cloud_redis.ListInstancesResponse + ]: r"""Return a callable for the list instances method over gRPC. Lists all Redis instances owned by a project in either the @@ -380,18 +392,18 @@ def list_instances(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_instances' not in self._stubs: - self._stubs['list_instances'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/ListInstances', + if "list_instances" not in self._stubs: + self._stubs["list_instances"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/ListInstances", request_serializer=cloud_redis.ListInstancesRequest.serialize, response_deserializer=cloud_redis.ListInstancesResponse.deserialize, ) - return self._stubs['list_instances'] + return self._stubs["list_instances"] @property - def get_instance(self) -> Callable[ - [cloud_redis.GetInstanceRequest], - cloud_redis.Instance]: + def get_instance( + self, + ) -> Callable[[cloud_redis.GetInstanceRequest], cloud_redis.Instance]: r"""Return a callable for the get instance method over gRPC. Gets the details of a specific Redis instance. @@ -406,18 +418,20 @@ def get_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_instance' not in self._stubs: - self._stubs['get_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/GetInstance', + if "get_instance" not in self._stubs: + self._stubs["get_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/GetInstance", request_serializer=cloud_redis.GetInstanceRequest.serialize, response_deserializer=cloud_redis.Instance.deserialize, ) - return self._stubs['get_instance'] + return self._stubs["get_instance"] @property - def get_instance_auth_string(self) -> Callable[ - [cloud_redis.GetInstanceAuthStringRequest], - cloud_redis.InstanceAuthString]: + def get_instance_auth_string( + self, + ) -> Callable[ + [cloud_redis.GetInstanceAuthStringRequest], cloud_redis.InstanceAuthString + ]: r"""Return a callable for the get instance auth string method over gRPC. Gets the AUTH string for a Redis instance. 
If AUTH is @@ -435,18 +449,18 @@ def get_instance_auth_string(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_instance_auth_string' not in self._stubs: - self._stubs['get_instance_auth_string'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/GetInstanceAuthString', + if "get_instance_auth_string" not in self._stubs: + self._stubs["get_instance_auth_string"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/GetInstanceAuthString", request_serializer=cloud_redis.GetInstanceAuthStringRequest.serialize, response_deserializer=cloud_redis.InstanceAuthString.deserialize, ) - return self._stubs['get_instance_auth_string'] + return self._stubs["get_instance_auth_string"] @property - def create_instance(self) -> Callable[ - [cloud_redis.CreateInstanceRequest], - operations_pb2.Operation]: + def create_instance( + self, + ) -> Callable[[cloud_redis.CreateInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the create instance method over gRPC. Creates a Redis instance based on the specified tier and memory @@ -474,18 +488,18 @@ def create_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'create_instance' not in self._stubs: - self._stubs['create_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/CreateInstance', + if "create_instance" not in self._stubs: + self._stubs["create_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/CreateInstance", request_serializer=cloud_redis.CreateInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_instance'] + return self._stubs["create_instance"] @property - def update_instance(self) -> Callable[ - [cloud_redis.UpdateInstanceRequest], - operations_pb2.Operation]: + def update_instance( + self, + ) -> Callable[[cloud_redis.UpdateInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the update instance method over gRPC. Updates the metadata and configuration of a specific @@ -505,18 +519,18 @@ def update_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_instance' not in self._stubs: - self._stubs['update_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/UpdateInstance', + if "update_instance" not in self._stubs: + self._stubs["update_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/UpdateInstance", request_serializer=cloud_redis.UpdateInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['update_instance'] + return self._stubs["update_instance"] @property - def upgrade_instance(self) -> Callable[ - [cloud_redis.UpgradeInstanceRequest], - operations_pb2.Operation]: + def upgrade_instance( + self, + ) -> Callable[[cloud_redis.UpgradeInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the upgrade instance method over gRPC. 
Upgrades Redis instance to the newer Redis version @@ -532,18 +546,18 @@ def upgrade_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'upgrade_instance' not in self._stubs: - self._stubs['upgrade_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/UpgradeInstance', + if "upgrade_instance" not in self._stubs: + self._stubs["upgrade_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/UpgradeInstance", request_serializer=cloud_redis.UpgradeInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['upgrade_instance'] + return self._stubs["upgrade_instance"] @property - def import_instance(self) -> Callable[ - [cloud_redis.ImportInstanceRequest], - operations_pb2.Operation]: + def import_instance( + self, + ) -> Callable[[cloud_redis.ImportInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the import instance method over gRPC. Import a Redis RDB snapshot file from Cloud Storage @@ -566,18 +580,18 @@ def import_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'import_instance' not in self._stubs: - self._stubs['import_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/ImportInstance', + if "import_instance" not in self._stubs: + self._stubs["import_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/ImportInstance", request_serializer=cloud_redis.ImportInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['import_instance'] + return self._stubs["import_instance"] @property - def export_instance(self) -> Callable[ - [cloud_redis.ExportInstanceRequest], - operations_pb2.Operation]: + def export_instance( + self, + ) -> Callable[[cloud_redis.ExportInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the export instance method over gRPC. Export Redis instance data into a Redis RDB format @@ -597,18 +611,18 @@ def export_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'export_instance' not in self._stubs: - self._stubs['export_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/ExportInstance', + if "export_instance" not in self._stubs: + self._stubs["export_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/ExportInstance", request_serializer=cloud_redis.ExportInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['export_instance'] + return self._stubs["export_instance"] @property - def failover_instance(self) -> Callable[ - [cloud_redis.FailoverInstanceRequest], - operations_pb2.Operation]: + def failover_instance( + self, + ) -> Callable[[cloud_redis.FailoverInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the failover instance method over gRPC. 
Initiates a failover of the primary node to current @@ -625,18 +639,18 @@ def failover_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'failover_instance' not in self._stubs: - self._stubs['failover_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/FailoverInstance', + if "failover_instance" not in self._stubs: + self._stubs["failover_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/FailoverInstance", request_serializer=cloud_redis.FailoverInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['failover_instance'] + return self._stubs["failover_instance"] @property - def delete_instance(self) -> Callable[ - [cloud_redis.DeleteInstanceRequest], - operations_pb2.Operation]: + def delete_instance( + self, + ) -> Callable[[cloud_redis.DeleteInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the delete instance method over gRPC. Deletes a specific Redis instance. Instance stops @@ -652,18 +666,18 @@ def delete_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'delete_instance' not in self._stubs: - self._stubs['delete_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/DeleteInstance', + if "delete_instance" not in self._stubs: + self._stubs["delete_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/DeleteInstance", request_serializer=cloud_redis.DeleteInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['delete_instance'] + return self._stubs["delete_instance"] @property - def reschedule_maintenance(self) -> Callable[ - [cloud_redis.RescheduleMaintenanceRequest], - operations_pb2.Operation]: + def reschedule_maintenance( + self, + ) -> Callable[[cloud_redis.RescheduleMaintenanceRequest], operations_pb2.Operation]: r"""Return a callable for the reschedule maintenance method over gRPC. Reschedule maintenance for a given instance in a @@ -679,13 +693,13 @@ def reschedule_maintenance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'reschedule_maintenance' not in self._stubs: - self._stubs['reschedule_maintenance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/RescheduleMaintenance', + if "reschedule_maintenance" not in self._stubs: + self._stubs["reschedule_maintenance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/RescheduleMaintenance", request_serializer=cloud_redis.RescheduleMaintenanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['reschedule_maintenance'] + return self._stubs["reschedule_maintenance"] def close(self): self._logged_channel.close() @@ -694,8 +708,7 @@ def close(self): def delete_operation( self, ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. 
- """ + r"""Return a callable for the delete_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -712,8 +725,7 @@ def delete_operation( def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -730,8 +742,7 @@ def cancel_operation( def wait_operation( self, ) -> Callable[[operations_pb2.WaitOperationRequest], None]: - r"""Return a callable for the wait_operation method over gRPC. - """ + r"""Return a callable for the wait_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -748,8 +759,7 @@ def wait_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -765,9 +775,10 @@ def get_operation( @property def list_operations( self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
# gRPC handles serialization and deserialization, so we just need @@ -783,9 +794,10 @@ def list_operations( @property def list_locations( self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -802,8 +814,7 @@ def list_locations( def get_location( self, ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ + r"""Return a callable for the list locations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -821,6 +832,4 @@ def kind(self) -> str: return "grpc" -__all__ = ( - 'CloudRedisGrpcTransport', -) +__all__ = ("CloudRedisGrpcTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index ffc910b13a68..fa5dfdfdd29e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -15,33 +15,31 @@ # import inspect import json -import pickle import logging as std_logging +import pickle import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import 
gapic_v1 -from google.api_core import grpc_helpers_async +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.api_core import retry_async as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.redis_v1.types import cloud_redis +from google.longrunning import operations_pb2 # type: ignore from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore from grpc.experimental import aio # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.cloud.redis_v1.types import cloud_redis -from google.longrunning import operations_pb2 # type: ignore -from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO +from .base import DEFAULT_CLIENT_INFO, CloudRedisTransport from .grpc import CloudRedisGrpcTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -49,9 +47,13 @@ _LOGGER = std_logging.getLogger(__name__) -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG 
+ ) if logging_enabled: # pragma: NO COVER request_metadata = client_call_details.metadata if isinstance(request, proto.Message): @@ -72,7 +74,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -83,7 +85,11 @@ async def intercept_unary_unary(self, continuation, client_call_details, request if logging_enabled: # pragma: NO COVER response_metadata = await response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) result = await response if isinstance(result, proto.Message): response_payload = type(result).to_json(result) @@ -98,7 +104,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Received response to rpc {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": str(client_call_details.method), "response": grpc_response, @@ -145,13 +151,15 @@ class CloudRedisGrpcAsyncIOTransport(CloudRedisTransport): _stubs: Dict[str, Callable] = {} @classmethod - def create_channel(cls, - host: str = 'redis.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: + def create_channel( + cls, + host: str = "redis.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: 
"""Create and return a gRPC AsyncIO channel object. Args: host (Optional[str]): The host for the channel to use. @@ -182,24 +190,26 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) - def __init__(self, *, - host: str = 'redis.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "redis.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. 
Args: @@ -329,7 +339,9 @@ def __init__(self, *, self._interceptor = _LoggingClientAIOInterceptor() self._grpc_channel._unary_unary_interceptors.append(self._interceptor) self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @@ -360,9 +372,11 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: return self._operations_client @property - def list_instances(self) -> Callable[ - [cloud_redis.ListInstancesRequest], - Awaitable[cloud_redis.ListInstancesResponse]]: + def list_instances( + self, + ) -> Callable[ + [cloud_redis.ListInstancesRequest], Awaitable[cloud_redis.ListInstancesResponse] + ]: r"""Return a callable for the list instances method over gRPC. Lists all Redis instances owned by a project in either the @@ -386,18 +400,18 @@ def list_instances(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_instances' not in self._stubs: - self._stubs['list_instances'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/ListInstances', + if "list_instances" not in self._stubs: + self._stubs["list_instances"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/ListInstances", request_serializer=cloud_redis.ListInstancesRequest.serialize, response_deserializer=cloud_redis.ListInstancesResponse.deserialize, ) - return self._stubs['list_instances'] + return self._stubs["list_instances"] @property - def get_instance(self) -> Callable[ - [cloud_redis.GetInstanceRequest], - Awaitable[cloud_redis.Instance]]: + def get_instance( + self, + ) -> Callable[[cloud_redis.GetInstanceRequest], Awaitable[cloud_redis.Instance]]: r"""Return a callable for the get instance method over gRPC. Gets the details of a specific Redis instance. @@ -412,18 +426,21 @@ def get_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_instance' not in self._stubs: - self._stubs['get_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/GetInstance', + if "get_instance" not in self._stubs: + self._stubs["get_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/GetInstance", request_serializer=cloud_redis.GetInstanceRequest.serialize, response_deserializer=cloud_redis.Instance.deserialize, ) - return self._stubs['get_instance'] + return self._stubs["get_instance"] @property - def get_instance_auth_string(self) -> Callable[ - [cloud_redis.GetInstanceAuthStringRequest], - Awaitable[cloud_redis.InstanceAuthString]]: + def get_instance_auth_string( + self, + ) -> Callable[ + [cloud_redis.GetInstanceAuthStringRequest], + Awaitable[cloud_redis.InstanceAuthString], + ]: r"""Return a callable for the get instance auth string method over gRPC. Gets the AUTH string for a Redis instance. 
If AUTH is @@ -441,18 +458,20 @@ def get_instance_auth_string(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_instance_auth_string' not in self._stubs: - self._stubs['get_instance_auth_string'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/GetInstanceAuthString', + if "get_instance_auth_string" not in self._stubs: + self._stubs["get_instance_auth_string"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/GetInstanceAuthString", request_serializer=cloud_redis.GetInstanceAuthStringRequest.serialize, response_deserializer=cloud_redis.InstanceAuthString.deserialize, ) - return self._stubs['get_instance_auth_string'] + return self._stubs["get_instance_auth_string"] @property - def create_instance(self) -> Callable[ - [cloud_redis.CreateInstanceRequest], - Awaitable[operations_pb2.Operation]]: + def create_instance( + self, + ) -> Callable[ + [cloud_redis.CreateInstanceRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the create instance method over gRPC. Creates a Redis instance based on the specified tier and memory @@ -480,18 +499,20 @@ def create_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'create_instance' not in self._stubs: - self._stubs['create_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/CreateInstance', + if "create_instance" not in self._stubs: + self._stubs["create_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/CreateInstance", request_serializer=cloud_redis.CreateInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_instance'] + return self._stubs["create_instance"] @property - def update_instance(self) -> Callable[ - [cloud_redis.UpdateInstanceRequest], - Awaitable[operations_pb2.Operation]]: + def update_instance( + self, + ) -> Callable[ + [cloud_redis.UpdateInstanceRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the update instance method over gRPC. Updates the metadata and configuration of a specific @@ -511,18 +532,20 @@ def update_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_instance' not in self._stubs: - self._stubs['update_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/UpdateInstance', + if "update_instance" not in self._stubs: + self._stubs["update_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/UpdateInstance", request_serializer=cloud_redis.UpdateInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['update_instance'] + return self._stubs["update_instance"] @property - def upgrade_instance(self) -> Callable[ - [cloud_redis.UpgradeInstanceRequest], - Awaitable[operations_pb2.Operation]]: + def upgrade_instance( + self, + ) -> Callable[ + [cloud_redis.UpgradeInstanceRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the upgrade instance method over gRPC. 
Upgrades Redis instance to the newer Redis version @@ -538,18 +561,20 @@ def upgrade_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'upgrade_instance' not in self._stubs: - self._stubs['upgrade_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/UpgradeInstance', + if "upgrade_instance" not in self._stubs: + self._stubs["upgrade_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/UpgradeInstance", request_serializer=cloud_redis.UpgradeInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['upgrade_instance'] + return self._stubs["upgrade_instance"] @property - def import_instance(self) -> Callable[ - [cloud_redis.ImportInstanceRequest], - Awaitable[operations_pb2.Operation]]: + def import_instance( + self, + ) -> Callable[ + [cloud_redis.ImportInstanceRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the import instance method over gRPC. Import a Redis RDB snapshot file from Cloud Storage @@ -572,18 +597,20 @@ def import_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'import_instance' not in self._stubs: - self._stubs['import_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/ImportInstance', + if "import_instance" not in self._stubs: + self._stubs["import_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/ImportInstance", request_serializer=cloud_redis.ImportInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['import_instance'] + return self._stubs["import_instance"] @property - def export_instance(self) -> Callable[ - [cloud_redis.ExportInstanceRequest], - Awaitable[operations_pb2.Operation]]: + def export_instance( + self, + ) -> Callable[ + [cloud_redis.ExportInstanceRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the export instance method over gRPC. Export Redis instance data into a Redis RDB format @@ -603,18 +630,20 @@ def export_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'export_instance' not in self._stubs: - self._stubs['export_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/ExportInstance', + if "export_instance" not in self._stubs: + self._stubs["export_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/ExportInstance", request_serializer=cloud_redis.ExportInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['export_instance'] + return self._stubs["export_instance"] @property - def failover_instance(self) -> Callable[ - [cloud_redis.FailoverInstanceRequest], - Awaitable[operations_pb2.Operation]]: + def failover_instance( + self, + ) -> Callable[ + [cloud_redis.FailoverInstanceRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the failover instance method over gRPC. 
Initiates a failover of the primary node to current @@ -631,18 +660,20 @@ def failover_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'failover_instance' not in self._stubs: - self._stubs['failover_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/FailoverInstance', + if "failover_instance" not in self._stubs: + self._stubs["failover_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/FailoverInstance", request_serializer=cloud_redis.FailoverInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['failover_instance'] + return self._stubs["failover_instance"] @property - def delete_instance(self) -> Callable[ - [cloud_redis.DeleteInstanceRequest], - Awaitable[operations_pb2.Operation]]: + def delete_instance( + self, + ) -> Callable[ + [cloud_redis.DeleteInstanceRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the delete instance method over gRPC. Deletes a specific Redis instance. Instance stops @@ -658,18 +689,20 @@ def delete_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'delete_instance' not in self._stubs: - self._stubs['delete_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/DeleteInstance', + if "delete_instance" not in self._stubs: + self._stubs["delete_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/DeleteInstance", request_serializer=cloud_redis.DeleteInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['delete_instance'] + return self._stubs["delete_instance"] @property - def reschedule_maintenance(self) -> Callable[ - [cloud_redis.RescheduleMaintenanceRequest], - Awaitable[operations_pb2.Operation]]: + def reschedule_maintenance( + self, + ) -> Callable[ + [cloud_redis.RescheduleMaintenanceRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the reschedule maintenance method over gRPC. Reschedule maintenance for a given instance in a @@ -685,16 +718,16 @@ def reschedule_maintenance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'reschedule_maintenance' not in self._stubs: - self._stubs['reschedule_maintenance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/RescheduleMaintenance', + if "reschedule_maintenance" not in self._stubs: + self._stubs["reschedule_maintenance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/RescheduleMaintenance", request_serializer=cloud_redis.RescheduleMaintenanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['reschedule_maintenance'] + return self._stubs["reschedule_maintenance"] def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { self.list_instances: self._wrap_method( self.list_instances, @@ -804,8 +837,7 @@ def kind(self) -> str: def delete_operation( self, ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ + r"""Return a callable for the delete_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -822,8 +854,7 @@ def delete_operation( def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -840,8 +871,7 @@ def cancel_operation( def wait_operation( self, ) -> Callable[[operations_pb2.WaitOperationRequest], None]: - r"""Return a callable for the wait_operation method over gRPC. 
- """ + r"""Return a callable for the wait_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -858,8 +888,7 @@ def wait_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -875,9 +904,10 @@ def get_operation( @property def list_operations( self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -893,9 +923,10 @@ def list_operations( @property def list_locations( self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -912,8 +943,7 @@ def list_locations( def get_location( self, ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. 
- """ + r"""Return a callable for the list locations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -927,6 +957,4 @@ def get_location( return self._stubs["get_location"] -__all__ = ( - 'CloudRedisGrpcAsyncIOTransport', -) +__all__ = ("CloudRedisGrpcAsyncIOTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 766a583552af..ccf7821ef6a7 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -13,34 +13,26 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import logging +import dataclasses import json # type: ignore +import logging +import warnings +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore +import google.protobuf from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1, rest_helpers, rest_streaming from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 -import google.protobuf - -from google.protobuf import json_format -from google.api_core import operations_v1 -from google.cloud.location import locations_pb2 # type: ignore - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.types import cloud_redis from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format +from requests import __version__ as requests_version - -from .rest_base import _BaseCloudRedisRestTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseCloudRedisRestTransport try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -49,6 +41,7 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -173,7 +166,14 @@ def post_upgrade_instance(self, response): """ - def pre_create_instance(self, request: 
cloud_redis.CreateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.CreateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + + def pre_create_instance( + self, + request: cloud_redis.CreateInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.CreateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for create_instance Override in a subclass to manipulate the request or metadata @@ -181,7 +181,9 @@ def pre_create_instance(self, request: cloud_redis.CreateInstanceRequest, metada """ return request, metadata - def post_create_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_create_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for create_instance DEPRECATED. Please use the `post_create_instance_with_metadata` @@ -194,7 +196,11 @@ def post_create_instance(self, response: operations_pb2.Operation) -> operations """ return response - def post_create_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_create_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for create_instance Override in a subclass to read or manipulate the response or metadata after it @@ -209,7 +215,13 @@ def post_create_instance_with_metadata(self, response: operations_pb2.Operation, """ return response, metadata - def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.DeleteInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_delete_instance( + 
self, + request: cloud_redis.DeleteInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.DeleteInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_instance Override in a subclass to manipulate the request or metadata @@ -217,7 +229,9 @@ def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, metada """ return request, metadata - def post_delete_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_delete_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_instance DEPRECATED. Please use the `post_delete_instance_with_metadata` @@ -230,7 +244,11 @@ def post_delete_instance(self, response: operations_pb2.Operation) -> operations """ return response - def post_delete_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_delete_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for delete_instance Override in a subclass to read or manipulate the response or metadata after it @@ -245,7 +263,13 @@ def post_delete_instance_with_metadata(self, response: operations_pb2.Operation, """ return response, metadata - def pre_export_instance(self, request: cloud_redis.ExportInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ExportInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_export_instance( + self, + request: cloud_redis.ExportInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.ExportInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: 
"""Pre-rpc interceptor for export_instance Override in a subclass to manipulate the request or metadata @@ -253,7 +277,9 @@ def pre_export_instance(self, request: cloud_redis.ExportInstanceRequest, metada """ return request, metadata - def post_export_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_export_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for export_instance DEPRECATED. Please use the `post_export_instance_with_metadata` @@ -266,7 +292,11 @@ def post_export_instance(self, response: operations_pb2.Operation) -> operations """ return response - def post_export_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_export_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for export_instance Override in a subclass to read or manipulate the response or metadata after it @@ -281,7 +311,13 @@ def post_export_instance_with_metadata(self, response: operations_pb2.Operation, """ return response, metadata - def pre_failover_instance(self, request: cloud_redis.FailoverInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.FailoverInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_failover_instance( + self, + request: cloud_redis.FailoverInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.FailoverInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for failover_instance Override in a subclass to manipulate the request or metadata @@ -289,7 +325,9 @@ def pre_failover_instance(self, request: 
cloud_redis.FailoverInstanceRequest, me """ return request, metadata - def post_failover_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_failover_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for failover_instance DEPRECATED. Please use the `post_failover_instance_with_metadata` @@ -302,7 +340,11 @@ def post_failover_instance(self, response: operations_pb2.Operation) -> operatio """ return response - def post_failover_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_failover_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for failover_instance Override in a subclass to read or manipulate the response or metadata after it @@ -317,7 +359,11 @@ def post_failover_instance_with_metadata(self, response: operations_pb2.Operatio """ return response, metadata - def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.GetInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_get_instance( + self, + request: cloud_redis.GetInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.GetInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_instance Override in a subclass to manipulate the request or metadata @@ -338,7 +384,11 @@ def post_get_instance(self, response: cloud_redis.Instance) -> cloud_redis.Insta """ return response - def post_get_instance_with_metadata(self, response: cloud_redis.Instance, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> 
Tuple[cloud_redis.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_get_instance_with_metadata( + self, + response: cloud_redis.Instance, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for get_instance Override in a subclass to read or manipulate the response or metadata after it @@ -353,7 +403,14 @@ def post_get_instance_with_metadata(self, response: cloud_redis.Instance, metada """ return response, metadata - def pre_get_instance_auth_string(self, request: cloud_redis.GetInstanceAuthStringRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.GetInstanceAuthStringRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_get_instance_auth_string( + self, + request: cloud_redis.GetInstanceAuthStringRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.GetInstanceAuthStringRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for get_instance_auth_string Override in a subclass to manipulate the request or metadata @@ -361,7 +418,9 @@ def pre_get_instance_auth_string(self, request: cloud_redis.GetInstanceAuthStrin """ return request, metadata - def post_get_instance_auth_string(self, response: cloud_redis.InstanceAuthString) -> cloud_redis.InstanceAuthString: + def post_get_instance_auth_string( + self, response: cloud_redis.InstanceAuthString + ) -> cloud_redis.InstanceAuthString: """Post-rpc interceptor for get_instance_auth_string DEPRECATED. 
Please use the `post_get_instance_auth_string_with_metadata` @@ -374,7 +433,11 @@ def post_get_instance_auth_string(self, response: cloud_redis.InstanceAuthString """ return response - def post_get_instance_auth_string_with_metadata(self, response: cloud_redis.InstanceAuthString, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.InstanceAuthString, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_get_instance_auth_string_with_metadata( + self, + response: cloud_redis.InstanceAuthString, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.InstanceAuthString, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for get_instance_auth_string Override in a subclass to read or manipulate the response or metadata after it @@ -389,7 +452,13 @@ def post_get_instance_auth_string_with_metadata(self, response: cloud_redis.Inst """ return response, metadata - def pre_import_instance(self, request: cloud_redis.ImportInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ImportInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_import_instance( + self, + request: cloud_redis.ImportInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.ImportInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for import_instance Override in a subclass to manipulate the request or metadata @@ -397,7 +466,9 @@ def pre_import_instance(self, request: cloud_redis.ImportInstanceRequest, metada """ return request, metadata - def post_import_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_import_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for import_instance DEPRECATED. 
Please use the `post_import_instance_with_metadata` @@ -410,7 +481,11 @@ def post_import_instance(self, response: operations_pb2.Operation) -> operations """ return response - def post_import_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_import_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for import_instance Override in a subclass to read or manipulate the response or metadata after it @@ -425,7 +500,13 @@ def post_import_instance_with_metadata(self, response: operations_pb2.Operation, """ return response, metadata - def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_list_instances( + self, + request: cloud_redis.ListInstancesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_instances Override in a subclass to manipulate the request or metadata @@ -433,7 +514,9 @@ def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, metadata """ return request, metadata - def post_list_instances(self, response: cloud_redis.ListInstancesResponse) -> cloud_redis.ListInstancesResponse: + def post_list_instances( + self, response: cloud_redis.ListInstancesResponse + ) -> cloud_redis.ListInstancesResponse: """Post-rpc interceptor for list_instances DEPRECATED. 
Please use the `post_list_instances_with_metadata` @@ -446,7 +529,13 @@ def post_list_instances(self, response: cloud_redis.ListInstancesResponse) -> cl """ return response - def post_list_instances_with_metadata(self, response: cloud_redis.ListInstancesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_list_instances_with_metadata( + self, + response: cloud_redis.ListInstancesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Post-rpc interceptor for list_instances Override in a subclass to read or manipulate the response or metadata after it @@ -461,7 +550,14 @@ def post_list_instances_with_metadata(self, response: cloud_redis.ListInstancesR """ return response, metadata - def pre_reschedule_maintenance(self, request: cloud_redis.RescheduleMaintenanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.RescheduleMaintenanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_reschedule_maintenance( + self, + request: cloud_redis.RescheduleMaintenanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.RescheduleMaintenanceRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for reschedule_maintenance Override in a subclass to manipulate the request or metadata @@ -469,7 +565,9 @@ def pre_reschedule_maintenance(self, request: cloud_redis.RescheduleMaintenanceR """ return request, metadata - def post_reschedule_maintenance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_reschedule_maintenance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for reschedule_maintenance DEPRECATED. 
Please use the `post_reschedule_maintenance_with_metadata` @@ -482,7 +580,11 @@ def post_reschedule_maintenance(self, response: operations_pb2.Operation) -> ope """ return response - def post_reschedule_maintenance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_reschedule_maintenance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for reschedule_maintenance Override in a subclass to read or manipulate the response or metadata after it @@ -497,7 +599,13 @@ def post_reschedule_maintenance_with_metadata(self, response: operations_pb2.Ope """ return response, metadata - def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_update_instance( + self, + request: cloud_redis.UpdateInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for update_instance Override in a subclass to manipulate the request or metadata @@ -505,7 +613,9 @@ def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, metada """ return request, metadata - def post_update_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_update_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for update_instance DEPRECATED. 
Please use the `post_update_instance_with_metadata` @@ -518,7 +628,11 @@ def post_update_instance(self, response: operations_pb2.Operation) -> operations """ return response - def post_update_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_update_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for update_instance Override in a subclass to read or manipulate the response or metadata after it @@ -533,7 +647,13 @@ def post_update_instance_with_metadata(self, response: operations_pb2.Operation, """ return response, metadata - def pre_upgrade_instance(self, request: cloud_redis.UpgradeInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.UpgradeInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_upgrade_instance( + self, + request: cloud_redis.UpgradeInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.UpgradeInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for upgrade_instance Override in a subclass to manipulate the request or metadata @@ -541,7 +661,9 @@ def pre_upgrade_instance(self, request: cloud_redis.UpgradeInstanceRequest, meta """ return request, metadata - def post_upgrade_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_upgrade_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for upgrade_instance DEPRECATED. 
Please use the `post_upgrade_instance_with_metadata` @@ -554,7 +676,11 @@ def post_upgrade_instance(self, response: operations_pb2.Operation) -> operation """ return response - def post_upgrade_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_upgrade_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for upgrade_instance Override in a subclass to read or manipulate the response or metadata after it @@ -570,8 +696,12 @@ def post_upgrade_instance_with_metadata(self, response: operations_pb2.Operation return response, metadata def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_location Override in a subclass to manipulate the request or metadata @@ -591,8 +721,12 @@ def post_get_location( return response def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_locations Override in a subclass to manipulate the request or metadata @@ -612,8 +746,12 @@ def 
post_list_locations( return response def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for cancel_operation Override in a subclass to manipulate the request or metadata @@ -621,9 +759,7 @@ def pre_cancel_operation( """ return request, metadata - def post_cancel_operation( - self, response: None - ) -> None: + def post_cancel_operation(self, response: None) -> None: """Post-rpc interceptor for cancel_operation Override in a subclass to manipulate the response @@ -633,8 +769,12 @@ def post_cancel_operation( return response def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_operation Override in a subclass to manipulate the request or metadata @@ -642,9 +782,7 @@ def pre_delete_operation( """ return request, metadata - def post_delete_operation( - self, response: None - ) -> None: + def post_delete_operation(self, response: None) -> None: """Post-rpc interceptor for delete_operation Override in a subclass to manipulate the response @@ -654,8 +792,12 @@ def post_delete_operation( return response def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> 
Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata @@ -675,8 +817,12 @@ def post_get_operation( return response def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_operations Override in a subclass to manipulate the request or metadata @@ -696,8 +842,12 @@ def post_list_operations( return response def pre_wait_operation( - self, request: operations_pb2.WaitOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.WaitOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: operations_pb2.WaitOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.WaitOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for wait_operation Override in a subclass to manipulate the request or metadata @@ -756,62 +906,63 @@ class CloudRedisRestTransport(_BaseCloudRedisRestTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, - host: str = 'redis.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], 
Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[CloudRedisRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "redis.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[CloudRedisRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via a GitHub issue in - this library's repository. Thank you! - - Args: - host (Optional[str]): - The hostname to connect to (default: 'redis.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. This argument will be - removed in the next major version of this library. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. 
- client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - interceptor (Optional[CloudRedisRestInterceptor]): Interceptor used - to manipulate requests, request metadata, and responses. - api_audience (Optional[str]): The intended audience for the API calls - to the service that will be set when using certain 3rd party - authentication flows. Audience is typically a resource identifier. - If not set, the host value will be used as a default. + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to (default: 'redis.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. 
This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + interceptor (Optional[CloudRedisRestInterceptor]): Interceptor used + to manipulate requests, request metadata, and responses. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the host value will be used as a default. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
@@ -823,10 +974,11 @@ def __init__(self, *, client_info=client_info, always_use_jwt_access=always_use_jwt_access, url_scheme=url_scheme, - api_audience=api_audience + api_audience=api_audience, ) self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) + self._credentials, default_host=self.DEFAULT_HOST + ) self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) @@ -843,53 +995,58 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ + "google.longrunning.Operations.CancelOperation": [ { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", }, ], - 'google.longrunning.Operations.DeleteOperation': [ + "google.longrunning.Operations.DeleteOperation": [ { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", }, ], - 'google.longrunning.Operations.GetOperation': [ + "google.longrunning.Operations.GetOperation": [ { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", }, ], - 'google.longrunning.Operations.ListOperations': [ + "google.longrunning.Operations.ListOperations": [ { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", }, ], - 'google.longrunning.Operations.WaitOperation': [ + "google.longrunning.Operations.WaitOperation": [ { - 'method': 'post', - 'uri': 
'/v2/{name=projects/*/locations/*/operations/*}:wait', - 'body': '*', + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/operations/*}:wait", + "body": "*", }, ], } rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) # Return the client from cache. return self._operations_client - class _CreateInstance(_BaseCloudRedisRestTransport._BaseCreateInstance, CloudRedisRestStub): + class _CreateInstance( + _BaseCloudRedisRestTransport._BaseCreateInstance, CloudRedisRestStub + ): def __hash__(self): return hash("CloudRedisRestTransport.CreateInstance") @@ -901,27 +1058,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: cloud_redis.CreateInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: 
cloud_redis.CreateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the create instance method over HTTP. Args: @@ -944,32 +1103,48 @@ def __call__(self, """ - http_options = _BaseCloudRedisRestTransport._BaseCreateInstance._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseCreateInstance._get_http_options() + ) request, metadata = self._interceptor.pre_create_instance(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseCreateInstance._get_transcoded_request(http_options, request) + transcoded_request = _BaseCloudRedisRestTransport._BaseCreateInstance._get_transcoded_request( + http_options, request + ) - body = _BaseCloudRedisRestTransport._BaseCreateInstance._get_request_body_json(transcoded_request) + body = ( + _BaseCloudRedisRestTransport._BaseCreateInstance._get_request_body_json( + transcoded_request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseCreateInstance._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseCreateInstance._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": 
method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.CreateInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "CreateInstance", "httpRequest": http_request, @@ -978,7 +1153,15 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._CreateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = CloudRedisRestTransport._CreateInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -991,20 +1174,24 @@ def __call__(self, resp = self._interceptor.post_create_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_create_instance_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.create_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "CreateInstance", "metadata": http_response["headers"], @@ -1013,7 +1200,9 @@ def __call__(self, ) return resp - class _DeleteInstance(_BaseCloudRedisRestTransport._BaseDeleteInstance, 
CloudRedisRestStub): + class _DeleteInstance( + _BaseCloudRedisRestTransport._BaseDeleteInstance, CloudRedisRestStub + ): def __hash__(self): return hash("CloudRedisRestTransport.DeleteInstance") @@ -1025,26 +1214,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: cloud_redis.DeleteInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: cloud_redis.DeleteInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the delete instance method over HTTP. 
Args: @@ -1067,30 +1258,42 @@ def __call__(self, """ - http_options = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseDeleteInstance._get_http_options() + ) request, metadata = self._interceptor.pre_delete_instance(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_transcoded_request(http_options, request) + transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseDeleteInstance._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "DeleteInstance", "httpRequest": http_request, @@ -1099,7 +1302,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._DeleteInstance._get_response(self._host, metadata, query_params, self._session, 
timeout, transcoded_request) + response = CloudRedisRestTransport._DeleteInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1112,20 +1322,24 @@ def __call__(self, resp = self._interceptor.post_delete_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_delete_instance_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.delete_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "DeleteInstance", "metadata": http_response["headers"], @@ -1134,7 +1348,9 @@ def __call__(self, ) return resp - class _ExportInstance(_BaseCloudRedisRestTransport._BaseExportInstance, CloudRedisRestStub): + class _ExportInstance( + _BaseCloudRedisRestTransport._BaseExportInstance, CloudRedisRestStub + ): def __hash__(self): return hash("CloudRedisRestTransport.ExportInstance") @@ -1146,27 +1362,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - 
headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: cloud_redis.ExportInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: cloud_redis.ExportInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the export instance method over HTTP. Args: @@ -1189,32 +1407,48 @@ def __call__(self, """ - http_options = _BaseCloudRedisRestTransport._BaseExportInstance._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseExportInstance._get_http_options() + ) request, metadata = self._interceptor.pre_export_instance(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseExportInstance._get_transcoded_request(http_options, request) + transcoded_request = _BaseCloudRedisRestTransport._BaseExportInstance._get_transcoded_request( + http_options, request + ) - body = _BaseCloudRedisRestTransport._BaseExportInstance._get_request_body_json(transcoded_request) + body = ( + _BaseCloudRedisRestTransport._BaseExportInstance._get_request_body_json( + transcoded_request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseExportInstance._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseExportInstance._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = 
"{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.ExportInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ExportInstance", "httpRequest": http_request, @@ -1223,7 +1457,15 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._ExportInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = CloudRedisRestTransport._ExportInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1236,20 +1478,24 @@ def __call__(self, resp = self._interceptor.post_export_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_export_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_export_instance_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.export_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ExportInstance", "metadata": http_response["headers"], @@ -1258,7 +1504,9 @@ def __call__(self, ) return resp - class _FailoverInstance(_BaseCloudRedisRestTransport._BaseFailoverInstance, CloudRedisRestStub): + class _FailoverInstance( + _BaseCloudRedisRestTransport._BaseFailoverInstance, CloudRedisRestStub + ): def __hash__(self): return hash("CloudRedisRestTransport.FailoverInstance") @@ -1270,27 +1518,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) 
+ ) return response - def __call__(self, - request: cloud_redis.FailoverInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: cloud_redis.FailoverInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the failover instance method over HTTP. Args: @@ -1313,32 +1563,46 @@ def __call__(self, """ - http_options = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseFailoverInstance._get_http_options() + ) - request, metadata = self._interceptor.pre_failover_instance(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_failover_instance( + request, metadata + ) + transcoded_request = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_transcoded_request( + http_options, request + ) - body = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_request_body_json(transcoded_request) + body = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER 
+ request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.FailoverInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "FailoverInstance", "httpRequest": http_request, @@ -1347,7 +1611,15 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._FailoverInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = CloudRedisRestTransport._FailoverInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1360,20 +1632,24 @@ def __call__(self, resp = self._interceptor.post_failover_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_failover_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_failover_instance_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.failover_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "FailoverInstance", "metadata": http_response["headers"], @@ -1382,7 +1658,9 @@ def __call__(self, ) return resp - class _GetInstance(_BaseCloudRedisRestTransport._BaseGetInstance, CloudRedisRestStub): + class _GetInstance( + _BaseCloudRedisRestTransport._BaseGetInstance, CloudRedisRestStub + ): def __hash__(self): return hash("CloudRedisRestTransport.GetInstance") @@ -1394,26 +1672,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def 
__call__(self, - request: cloud_redis.GetInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> cloud_redis.Instance: + def __call__( + self, + request: cloud_redis.GetInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.Instance: r"""Call the get instance method over HTTP. Args: @@ -1433,30 +1713,44 @@ def __call__(self, A Memorystore for Redis instance. """ - http_options = _BaseCloudRedisRestTransport._BaseGetInstance._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseGetInstance._get_http_options() + ) request, metadata = self._interceptor.pre_get_instance(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseGetInstance._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseCloudRedisRestTransport._BaseGetInstance._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseGetInstance._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": 
dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetInstance", "httpRequest": http_request, @@ -1465,7 +1759,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._GetInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._GetInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1480,20 +1781,24 @@ def __call__(self, resp = self._interceptor.post_get_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_get_instance_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = cloud_redis.Instance.to_json(response) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.get_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetInstance", "metadata": http_response["headers"], @@ -1502,7 +1807,9 @@ def __call__(self, ) return resp - class 
_GetInstanceAuthString(_BaseCloudRedisRestTransport._BaseGetInstanceAuthString, CloudRedisRestStub): + class _GetInstanceAuthString( + _BaseCloudRedisRestTransport._BaseGetInstanceAuthString, CloudRedisRestStub + ): def __hash__(self): return hash("CloudRedisRestTransport.GetInstanceAuthString") @@ -1514,26 +1821,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: cloud_redis.GetInstanceAuthStringRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> cloud_redis.InstanceAuthString: + def __call__( + self, + request: cloud_redis.GetInstanceAuthStringRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.InstanceAuthString: r"""Call the get instance auth string method over HTTP. 
Args: @@ -1555,28 +1864,38 @@ def __call__(self, http_options = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_http_options() - request, metadata = self._interceptor.pre_get_instance_auth_string(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_get_instance_auth_string( + request, metadata + ) + transcoded_request = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetInstanceAuthString", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetInstanceAuthString", "httpRequest": http_request, @@ -1585,7 +1904,14 @@ def __call__(self, ) # Send the request - response = 
CloudRedisRestTransport._GetInstanceAuthString._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._GetInstanceAuthString._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1600,20 +1926,24 @@ def __call__(self, resp = self._interceptor.post_get_instance_auth_string(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_instance_auth_string_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_get_instance_auth_string_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = cloud_redis.InstanceAuthString.to_json(response) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.get_instance_auth_string", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetInstanceAuthString", "metadata": http_response["headers"], @@ -1622,7 +1952,9 @@ def __call__(self, ) return resp - class _ImportInstance(_BaseCloudRedisRestTransport._BaseImportInstance, CloudRedisRestStub): + class _ImportInstance( + _BaseCloudRedisRestTransport._BaseImportInstance, CloudRedisRestStub + ): def __hash__(self): return hash("CloudRedisRestTransport.ImportInstance") @@ -1634,27 +1966,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = 
transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: cloud_redis.ImportInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: cloud_redis.ImportInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the import instance method over HTTP. 
Args: @@ -1677,32 +2011,48 @@ def __call__(self, """ - http_options = _BaseCloudRedisRestTransport._BaseImportInstance._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseImportInstance._get_http_options() + ) request, metadata = self._interceptor.pre_import_instance(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseImportInstance._get_transcoded_request(http_options, request) + transcoded_request = _BaseCloudRedisRestTransport._BaseImportInstance._get_transcoded_request( + http_options, request + ) - body = _BaseCloudRedisRestTransport._BaseImportInstance._get_request_body_json(transcoded_request) + body = ( + _BaseCloudRedisRestTransport._BaseImportInstance._get_request_body_json( + transcoded_request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseImportInstance._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseImportInstance._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.ImportInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ImportInstance", 
"httpRequest": http_request, @@ -1711,7 +2061,15 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._ImportInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = CloudRedisRestTransport._ImportInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1724,20 +2082,24 @@ def __call__(self, resp = self._interceptor.post_import_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_import_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_import_instance_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.import_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ImportInstance", "metadata": http_response["headers"], @@ -1746,7 +2108,9 @@ def __call__(self, ) return resp - class _ListInstances(_BaseCloudRedisRestTransport._BaseListInstances, CloudRedisRestStub): + class _ListInstances( + _BaseCloudRedisRestTransport._BaseListInstances, CloudRedisRestStub + ): def __hash__(self): return hash("CloudRedisRestTransport.ListInstances") @@ -1758,26 +2122,28 @@ def _get_response( session, timeout, 
transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: cloud_redis.ListInstancesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> cloud_redis.ListInstancesResponse: + def __call__( + self, + request: cloud_redis.ListInstancesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.ListInstancesResponse: r"""Call the list instances method over HTTP. 
Args: @@ -1799,30 +2165,44 @@ def __call__(self, """ - http_options = _BaseCloudRedisRestTransport._BaseListInstances._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseListInstances._get_http_options() + ) request, metadata = self._interceptor.pre_list_instances(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseListInstances._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseCloudRedisRestTransport._BaseListInstances._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseListInstances._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseListInstances._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListInstances", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListInstances", "httpRequest": http_request, @@ -1831,7 +2211,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._ListInstances._get_response(self._host, metadata, query_params, self._session, 
timeout, transcoded_request) + response = CloudRedisRestTransport._ListInstances._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1846,20 +2233,26 @@ def __call__(self, resp = self._interceptor.post_list_instances(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_instances_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_list_instances_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = cloud_redis.ListInstancesResponse.to_json(response) + response_payload = cloud_redis.ListInstancesResponse.to_json( + response + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.list_instances", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListInstances", "metadata": http_response["headers"], @@ -1868,7 +2261,9 @@ def __call__(self, ) return resp - class _RescheduleMaintenance(_BaseCloudRedisRestTransport._BaseRescheduleMaintenance, CloudRedisRestStub): + class _RescheduleMaintenance( + _BaseCloudRedisRestTransport._BaseRescheduleMaintenance, CloudRedisRestStub + ): def __hash__(self): return hash("CloudRedisRestTransport.RescheduleMaintenance") @@ -1880,27 +2275,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = 
transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: cloud_redis.RescheduleMaintenanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: cloud_redis.RescheduleMaintenanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the reschedule maintenance method over HTTP. 
Args: @@ -1925,30 +2322,42 @@ def __call__(self, http_options = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_http_options() - request, metadata = self._interceptor.pre_reschedule_maintenance(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_reschedule_maintenance( + request, metadata + ) + transcoded_request = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_transcoded_request( + http_options, request + ) - body = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_request_body_json(transcoded_request) + body = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.RescheduleMaintenance", - extra = { + extra={ "serviceName": 
"google.cloud.redis.v1.CloudRedis", "rpcName": "RescheduleMaintenance", "httpRequest": http_request, @@ -1957,7 +2366,15 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._RescheduleMaintenance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = CloudRedisRestTransport._RescheduleMaintenance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1970,20 +2387,24 @@ def __call__(self, resp = self._interceptor.post_reschedule_maintenance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_reschedule_maintenance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_reschedule_maintenance_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.reschedule_maintenance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "RescheduleMaintenance", "metadata": http_response["headers"], @@ -1992,7 +2413,9 @@ def __call__(self, ) return resp - class _UpdateInstance(_BaseCloudRedisRestTransport._BaseUpdateInstance, CloudRedisRestStub): + class _UpdateInstance( + _BaseCloudRedisRestTransport._BaseUpdateInstance, CloudRedisRestStub + ): def __hash__(self): return 
hash("CloudRedisRestTransport.UpdateInstance") @@ -2004,27 +2427,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: cloud_redis.UpdateInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: cloud_redis.UpdateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the update instance method over HTTP. 
Args: @@ -2047,32 +2472,48 @@ def __call__(self, """ - http_options = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseUpdateInstance._get_http_options() + ) request, metadata = self._interceptor.pre_update_instance(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_transcoded_request(http_options, request) + transcoded_request = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_transcoded_request( + http_options, request + ) - body = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_request_body_json(transcoded_request) + body = ( + _BaseCloudRedisRestTransport._BaseUpdateInstance._get_request_body_json( + transcoded_request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseUpdateInstance._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.UpdateInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "UpdateInstance", 
"httpRequest": http_request, @@ -2081,7 +2522,15 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._UpdateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = CloudRedisRestTransport._UpdateInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2094,20 +2543,24 @@ def __call__(self, resp = self._interceptor.post_update_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_update_instance_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.update_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "UpdateInstance", "metadata": http_response["headers"], @@ -2116,7 +2569,9 @@ def __call__(self, ) return resp - class _UpgradeInstance(_BaseCloudRedisRestTransport._BaseUpgradeInstance, CloudRedisRestStub): + class _UpgradeInstance( + _BaseCloudRedisRestTransport._BaseUpgradeInstance, CloudRedisRestStub + ): def __hash__(self): return hash("CloudRedisRestTransport.UpgradeInstance") @@ -2128,27 +2583,29 @@ def _get_response( session, timeout, 
transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: cloud_redis.UpgradeInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: cloud_redis.UpgradeInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the upgrade instance method over HTTP. 
Args: @@ -2171,32 +2628,46 @@ def __call__(self, """ - http_options = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_http_options() + ) - request, metadata = self._interceptor.pre_upgrade_instance(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_upgrade_instance( + request, metadata + ) + transcoded_request = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_transcoded_request( + http_options, request + ) - body = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_request_body_json(transcoded_request) + body = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.UpgradeInstance", - extra = { + 
extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "UpgradeInstance", "httpRequest": http_request, @@ -2205,7 +2676,15 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._UpgradeInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = CloudRedisRestTransport._UpgradeInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2218,20 +2697,24 @@ def __call__(self, resp = self._interceptor.post_upgrade_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_upgrade_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_upgrade_instance_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.upgrade_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "UpgradeInstance", "metadata": http_response["headers"], @@ -2241,98 +2724,104 @@ def __call__(self, return resp @property - def create_instance(self) -> Callable[ - [cloud_redis.CreateInstanceRequest], - operations_pb2.Operation]: + def create_instance( + self, + ) -> Callable[[cloud_redis.CreateInstanceRequest], operations_pb2.Operation]: # The return 
type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore @property - def delete_instance(self) -> Callable[ - [cloud_redis.DeleteInstanceRequest], - operations_pb2.Operation]: + def delete_instance( + self, + ) -> Callable[[cloud_redis.DeleteInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore @property - def export_instance(self) -> Callable[ - [cloud_redis.ExportInstanceRequest], - operations_pb2.Operation]: + def export_instance( + self, + ) -> Callable[[cloud_redis.ExportInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ExportInstance(self._session, self._host, self._interceptor) # type: ignore + return self._ExportInstance(self._session, self._host, self._interceptor) # type: ignore @property - def failover_instance(self) -> Callable[ - [cloud_redis.FailoverInstanceRequest], - operations_pb2.Operation]: + def failover_instance( + self, + ) -> Callable[[cloud_redis.FailoverInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._FailoverInstance(self._session, self._host, self._interceptor) # type: ignore + return self._FailoverInstance(self._session, self._host, self._interceptor) # type: ignore @property - def get_instance(self) -> Callable[ - [cloud_redis.GetInstanceRequest], - cloud_redis.Instance]: + def get_instance( + self, + ) -> Callable[[cloud_redis.GetInstanceRequest], cloud_redis.Instance]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore @property - def get_instance_auth_string(self) -> Callable[ - [cloud_redis.GetInstanceAuthStringRequest], - cloud_redis.InstanceAuthString]: + def get_instance_auth_string( + self, + ) -> Callable[ + [cloud_redis.GetInstanceAuthStringRequest], cloud_redis.InstanceAuthString + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetInstanceAuthString(self._session, self._host, self._interceptor) # type: ignore + return self._GetInstanceAuthString(self._session, self._host, self._interceptor) # type: ignore @property - def import_instance(self) -> Callable[ - [cloud_redis.ImportInstanceRequest], - operations_pb2.Operation]: + def import_instance( + self, + ) -> Callable[[cloud_redis.ImportInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._ImportInstance(self._session, self._host, self._interceptor) # type: ignore + return self._ImportInstance(self._session, self._host, self._interceptor) # type: ignore @property - def list_instances(self) -> Callable[ - [cloud_redis.ListInstancesRequest], - cloud_redis.ListInstancesResponse]: + def list_instances( + self, + ) -> Callable[ + [cloud_redis.ListInstancesRequest], cloud_redis.ListInstancesResponse + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore @property - def reschedule_maintenance(self) -> Callable[ - [cloud_redis.RescheduleMaintenanceRequest], - operations_pb2.Operation]: + def reschedule_maintenance( + self, + ) -> Callable[[cloud_redis.RescheduleMaintenanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._RescheduleMaintenance(self._session, self._host, self._interceptor) # type: ignore + return self._RescheduleMaintenance(self._session, self._host, self._interceptor) # type: ignore @property - def update_instance(self) -> Callable[ - [cloud_redis.UpdateInstanceRequest], - operations_pb2.Operation]: + def update_instance( + self, + ) -> Callable[[cloud_redis.UpdateInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore @property - def upgrade_instance(self) -> Callable[ - [cloud_redis.UpgradeInstanceRequest], - operations_pb2.Operation]: + def upgrade_instance( + self, + ) -> Callable[[cloud_redis.UpgradeInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._UpgradeInstance(self._session, self._host, self._interceptor) # type: ignore + return self._UpgradeInstance(self._session, self._host, self._interceptor) # type: ignore @property def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - class _GetLocation(_BaseCloudRedisRestTransport._BaseGetLocation, CloudRedisRestStub): + class _GetLocation( + _BaseCloudRedisRestTransport._BaseGetLocation, CloudRedisRestStub + ): def __hash__(self): return hash("CloudRedisRestTransport.GetLocation") @@ -2344,27 +2833,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.Location: - + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: r"""Call the get location method over HTTP. Args: @@ -2382,30 +2872,44 @@ def __call__(self, locations_pb2.Location: Response from GetLocation method. """ - http_options = _BaseCloudRedisRestTransport._BaseGetLocation._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseGetLocation._get_http_options() + ) request, metadata = self._interceptor.pre_get_location(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseCloudRedisRestTransport._BaseGetLocation._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseGetLocation._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": 
dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetLocation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetLocation", "httpRequest": http_request, @@ -2414,7 +2918,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._GetLocation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2425,19 +2936,21 @@ def __call__(self, resp = locations_pb2.Location() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_location(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.GetLocation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetLocation", "httpResponse": http_response, @@ -2448,9 +2961,11 @@ def __call__(self, @property def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - class _ListLocations(_BaseCloudRedisRestTransport._BaseListLocations, CloudRedisRestStub): + class _ListLocations( + _BaseCloudRedisRestTransport._BaseListLocations, CloudRedisRestStub + ): def __hash__(self): return 
hash("CloudRedisRestTransport.ListLocations") @@ -2462,27 +2977,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.ListLocationsResponse: - + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: r"""Call the list locations method over HTTP. Args: @@ -2500,30 +3016,44 @@ def __call__(self, locations_pb2.ListLocationsResponse: Response from ListLocations method. 
""" - http_options = _BaseCloudRedisRestTransport._BaseListLocations._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseListLocations._get_http_options() + ) request, metadata = self._interceptor.pre_list_locations(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseListLocations._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseCloudRedisRestTransport._BaseListLocations._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseListLocations._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseListLocations._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListLocations", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListLocations", "httpRequest": http_request, @@ -2532,7 +3062,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = 
CloudRedisRestTransport._ListLocations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2543,19 +3080,21 @@ def __call__(self, resp = locations_pb2.ListLocationsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_locations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.ListLocations", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListLocations", "httpResponse": http_response, @@ -2566,9 +3105,11 @@ def __call__(self, @property def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - class _CancelOperation(_BaseCloudRedisRestTransport._BaseCancelOperation, CloudRedisRestStub): + class _CancelOperation( + _BaseCloudRedisRestTransport._BaseCancelOperation, CloudRedisRestStub + ): def __hash__(self): return hash("CloudRedisRestTransport.CancelOperation") @@ -2580,27 +3121,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = 
"application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Call the cancel operation method over HTTP. Args: @@ -2615,30 +3157,42 @@ def __call__(self, be of type `bytes`. """ - http_options = _BaseCloudRedisRestTransport._BaseCancelOperation._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseCancelOperation._get_http_options() + ) - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + transcoded_request = _BaseCloudRedisRestTransport._BaseCancelOperation._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseCloudRedisRestTransport._BaseCancelOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = 
"{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.CancelOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "CancelOperation", "httpRequest": http_request, @@ -2647,7 +3201,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2658,9 +3219,11 @@ def __call__(self, @property def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - class _DeleteOperation(_BaseCloudRedisRestTransport._BaseDeleteOperation, CloudRedisRestStub): + class _DeleteOperation( + _BaseCloudRedisRestTransport._BaseDeleteOperation, CloudRedisRestStub + ): def __hash__(self): return hash("CloudRedisRestTransport.DeleteOperation") @@ -2672,27 +3235,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Call the delete operation method over HTTP. Args: @@ -2707,30 +3271,42 @@ def __call__(self, be of type `bytes`. 
""" - http_options = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseDeleteOperation._get_http_options() + ) - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "DeleteOperation", "httpRequest": http_request, @@ -2739,7 +3315,14 @@ def __call__(self, ) # Send the request - response = 
CloudRedisRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2750,9 +3333,11 @@ def __call__(self, @property def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - class _GetOperation(_BaseCloudRedisRestTransport._BaseGetOperation, CloudRedisRestStub): + class _GetOperation( + _BaseCloudRedisRestTransport._BaseGetOperation, CloudRedisRestStub + ): def __hash__(self): return hash("CloudRedisRestTransport.GetOperation") @@ -2764,27 +3349,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: 
r"""Call the get operation method over HTTP. Args: @@ -2802,30 +3388,44 @@ def __call__(self, operations_pb2.Operation: Response from GetOperation method. """ - http_options = _BaseCloudRedisRestTransport._BaseGetOperation._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseGetOperation._get_http_options() + ) request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseCloudRedisRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetOperation", "httpRequest": http_request, @@ -2834,7 +3434,14 @@ def __call__(self, ) # Send the request - response = 
CloudRedisRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2845,19 +3452,21 @@ def __call__(self, resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.GetOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetOperation", "httpResponse": http_response, @@ -2868,9 +3477,11 @@ def __call__(self, @property def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - class _ListOperations(_BaseCloudRedisRestTransport._BaseListOperations, CloudRedisRestStub): + class _ListOperations( + _BaseCloudRedisRestTransport._BaseListOperations, CloudRedisRestStub + ): def __hash__(self): return hash("CloudRedisRestTransport.ListOperations") @@ -2882,27 +3493,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = 
transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: r"""Call the list operations method over HTTP. Args: @@ -2920,30 +3532,42 @@ def __call__(self, operations_pb2.ListOperationsResponse: Response from ListOperations method. 
""" - http_options = _BaseCloudRedisRestTransport._BaseListOperations._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseListOperations._get_http_options() + ) request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + transcoded_request = _BaseCloudRedisRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListOperations", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListOperations", "httpRequest": http_request, @@ -2952,7 +3576,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = 
CloudRedisRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2963,19 +3594,21 @@ def __call__(self, resp = operations_pb2.ListOperationsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.ListOperations", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListOperations", "httpResponse": http_response, @@ -2986,9 +3619,11 @@ def __call__(self, @property def wait_operation(self): - return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore + return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore - class _WaitOperation(_BaseCloudRedisRestTransport._BaseWaitOperation, CloudRedisRestStub): + class _WaitOperation( + _BaseCloudRedisRestTransport._BaseWaitOperation, CloudRedisRestStub + ): def __hash__(self): return hash("CloudRedisRestTransport.WaitOperation") @@ -3000,28 +3635,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" 
response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: operations_pb2.WaitOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - + def __call__( + self, + request: operations_pb2.WaitOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the wait operation method over HTTP. Args: @@ -3039,32 +3675,50 @@ def __call__(self, operations_pb2.Operation: Response from WaitOperation method. """ - http_options = _BaseCloudRedisRestTransport._BaseWaitOperation._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseWaitOperation._get_http_options() + ) request, metadata = self._interceptor.pre_wait_operation(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseWaitOperation._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseCloudRedisRestTransport._BaseWaitOperation._get_transcoded_request( + http_options, request + ) + ) - body = _BaseCloudRedisRestTransport._BaseWaitOperation._get_request_body_json(transcoded_request) + body = ( + _BaseCloudRedisRestTransport._BaseWaitOperation._get_request_body_json( + transcoded_request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseWaitOperation._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseWaitOperation._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, 
uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.WaitOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "WaitOperation", "httpRequest": http_request, @@ -3073,7 +3727,15 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._WaitOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = CloudRedisRestTransport._WaitOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -3084,19 +3746,21 @@ def __call__(self, resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = self._interceptor.post_wait_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.WaitOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "WaitOperation", "httpResponse": http_response, @@ -3113,6 +3777,4 @@ def close(self): self._session.close() -__all__=( - 'CloudRedisRestTransport', -) +__all__ = ("CloudRedisRestTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py index e6440e410c16..6bbcfeba7418 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py @@ -15,47 +15,45 @@ # import google.auth + try: - import aiohttp # type: ignore - from google.auth.aio.transport.sessions import AsyncAuthorizedSession # type: ignore - from google.api_core import rest_streaming_async # type: ignore - from google.api_core.operations_v1 import AsyncOperationsRestClient # type: ignore + import aiohttp # type: ignore + from google.api_core import rest_streaming_async # type: ignore + from google.api_core.operations_v1 
import AsyncOperationsRestClient # type: ignore + from google.auth.aio.transport.sessions import ( + AsyncAuthorizedSession, # type: ignore + ) except ImportError as e: # pragma: NO COVER - raise ImportError("`rest_asyncio` transport requires the library to be installed with the `async_rest` extra. Install the library with the `async_rest` extra using `pip install google-cloud-redis[async_rest]`") from e + raise ImportError( + "`rest_asyncio` transport requires the library to be installed with the `async_rest` extra. Install the library with the `async_rest` extra using `pip install google-cloud-redis[async_rest]`" + ) from e -from google.auth.aio import credentials as ga_credentials_async # type: ignore +import dataclasses +import json # type: ignore +import logging +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import google.protobuf from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import operations_v1 -from google.cloud.location import locations_pb2 # type: ignore +from google.api_core import ( + gapic_v1, + operations_v1, + rest_helpers, + rest_streaming_async, # type: ignore +) from google.api_core import retry_async as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming_async # type: ignore -import google.protobuf - -from google.protobuf import json_format -from google.api_core import operations_v1 -from google.cloud.location import locations_pb2 # type: ignore - -import json # type: ignore -import dataclasses -from typing import Any, Dict, List, Callable, Tuple, Optional, Sequence, Union - - +from google.auth.aio import credentials as ga_credentials_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore # type: ignore from google.cloud.redis_v1.types import cloud_redis from google.longrunning import operations_pb2 # type: ignore - - -from .rest_base import _BaseCloudRedisRestTransport 
+from google.protobuf import json_format from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -import logging +from .rest_base import _BaseCloudRedisRestTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -185,7 +183,14 @@ async def post_upgrade_instance(self, response): """ - async def pre_create_instance(self, request: cloud_redis.CreateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.CreateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + + async def pre_create_instance( + self, + request: cloud_redis.CreateInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.CreateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for create_instance Override in a subclass to manipulate the request or metadata @@ -193,7 +198,9 @@ async def pre_create_instance(self, request: cloud_redis.CreateInstanceRequest, """ return request, metadata - async def post_create_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + async def post_create_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for create_instance DEPRECATED. 
Please use the `post_create_instance_with_metadata` @@ -206,7 +213,11 @@ async def post_create_instance(self, response: operations_pb2.Operation) -> oper """ return response - async def post_create_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_create_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for create_instance Override in a subclass to read or manipulate the response or metadata after it @@ -221,7 +232,13 @@ async def post_create_instance_with_metadata(self, response: operations_pb2.Oper """ return response, metadata - async def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.DeleteInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + async def pre_delete_instance( + self, + request: cloud_redis.DeleteInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.DeleteInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_instance Override in a subclass to manipulate the request or metadata @@ -229,7 +246,9 @@ async def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, """ return request, metadata - async def post_delete_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + async def post_delete_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_instance DEPRECATED. 
Please use the `post_delete_instance_with_metadata` @@ -242,7 +261,11 @@ async def post_delete_instance(self, response: operations_pb2.Operation) -> oper """ return response - async def post_delete_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_delete_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for delete_instance Override in a subclass to read or manipulate the response or metadata after it @@ -257,7 +280,13 @@ async def post_delete_instance_with_metadata(self, response: operations_pb2.Oper """ return response, metadata - async def pre_export_instance(self, request: cloud_redis.ExportInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ExportInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + async def pre_export_instance( + self, + request: cloud_redis.ExportInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.ExportInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for export_instance Override in a subclass to manipulate the request or metadata @@ -265,7 +294,9 @@ async def pre_export_instance(self, request: cloud_redis.ExportInstanceRequest, """ return request, metadata - async def post_export_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + async def post_export_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for export_instance DEPRECATED. 
Please use the `post_export_instance_with_metadata` @@ -278,7 +309,11 @@ async def post_export_instance(self, response: operations_pb2.Operation) -> oper """ return response - async def post_export_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_export_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for export_instance Override in a subclass to read or manipulate the response or metadata after it @@ -293,7 +328,13 @@ async def post_export_instance_with_metadata(self, response: operations_pb2.Oper """ return response, metadata - async def pre_failover_instance(self, request: cloud_redis.FailoverInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.FailoverInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + async def pre_failover_instance( + self, + request: cloud_redis.FailoverInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.FailoverInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for failover_instance Override in a subclass to manipulate the request or metadata @@ -301,7 +342,9 @@ async def pre_failover_instance(self, request: cloud_redis.FailoverInstanceReque """ return request, metadata - async def post_failover_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + async def post_failover_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for failover_instance DEPRECATED. 
Please use the `post_failover_instance_with_metadata` @@ -314,7 +357,11 @@ async def post_failover_instance(self, response: operations_pb2.Operation) -> op """ return response - async def post_failover_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_failover_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for failover_instance Override in a subclass to read or manipulate the response or metadata after it @@ -329,7 +376,11 @@ async def post_failover_instance_with_metadata(self, response: operations_pb2.Op """ return response, metadata - async def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.GetInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + async def pre_get_instance( + self, + request: cloud_redis.GetInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.GetInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_instance Override in a subclass to manipulate the request or metadata @@ -337,7 +388,9 @@ async def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metada """ return request, metadata - async def post_get_instance(self, response: cloud_redis.Instance) -> cloud_redis.Instance: + async def post_get_instance( + self, response: cloud_redis.Instance + ) -> cloud_redis.Instance: """Post-rpc interceptor for get_instance DEPRECATED. 
Please use the `post_get_instance_with_metadata` @@ -350,7 +403,11 @@ async def post_get_instance(self, response: cloud_redis.Instance) -> cloud_redis """ return response - async def post_get_instance_with_metadata(self, response: cloud_redis.Instance, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_get_instance_with_metadata( + self, + response: cloud_redis.Instance, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for get_instance Override in a subclass to read or manipulate the response or metadata after it @@ -365,7 +422,14 @@ async def post_get_instance_with_metadata(self, response: cloud_redis.Instance, """ return response, metadata - async def pre_get_instance_auth_string(self, request: cloud_redis.GetInstanceAuthStringRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.GetInstanceAuthStringRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + async def pre_get_instance_auth_string( + self, + request: cloud_redis.GetInstanceAuthStringRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.GetInstanceAuthStringRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for get_instance_auth_string Override in a subclass to manipulate the request or metadata @@ -373,7 +437,9 @@ async def pre_get_instance_auth_string(self, request: cloud_redis.GetInstanceAut """ return request, metadata - async def post_get_instance_auth_string(self, response: cloud_redis.InstanceAuthString) -> cloud_redis.InstanceAuthString: + async def post_get_instance_auth_string( + self, response: cloud_redis.InstanceAuthString + ) -> cloud_redis.InstanceAuthString: """Post-rpc interceptor for get_instance_auth_string DEPRECATED. 
Please use the `post_get_instance_auth_string_with_metadata` @@ -386,7 +452,11 @@ async def post_get_instance_auth_string(self, response: cloud_redis.InstanceAuth """ return response - async def post_get_instance_auth_string_with_metadata(self, response: cloud_redis.InstanceAuthString, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.InstanceAuthString, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_get_instance_auth_string_with_metadata( + self, + response: cloud_redis.InstanceAuthString, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.InstanceAuthString, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for get_instance_auth_string Override in a subclass to read or manipulate the response or metadata after it @@ -401,7 +471,13 @@ async def post_get_instance_auth_string_with_metadata(self, response: cloud_redi """ return response, metadata - async def pre_import_instance(self, request: cloud_redis.ImportInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ImportInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + async def pre_import_instance( + self, + request: cloud_redis.ImportInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.ImportInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for import_instance Override in a subclass to manipulate the request or metadata @@ -409,7 +485,9 @@ async def pre_import_instance(self, request: cloud_redis.ImportInstanceRequest, """ return request, metadata - async def post_import_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + async def post_import_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for import_instance DEPRECATED. 
Please use the `post_import_instance_with_metadata` @@ -422,7 +500,11 @@ async def post_import_instance(self, response: operations_pb2.Operation) -> oper """ return response - async def post_import_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_import_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for import_instance Override in a subclass to read or manipulate the response or metadata after it @@ -437,7 +519,13 @@ async def post_import_instance_with_metadata(self, response: operations_pb2.Oper """ return response, metadata - async def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + async def pre_list_instances( + self, + request: cloud_redis.ListInstancesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_instances Override in a subclass to manipulate the request or metadata @@ -445,7 +533,9 @@ async def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, me """ return request, metadata - async def post_list_instances(self, response: cloud_redis.ListInstancesResponse) -> cloud_redis.ListInstancesResponse: + async def post_list_instances( + self, response: cloud_redis.ListInstancesResponse + ) -> cloud_redis.ListInstancesResponse: """Post-rpc interceptor for list_instances DEPRECATED. 
Please use the `post_list_instances_with_metadata` @@ -458,7 +548,13 @@ async def post_list_instances(self, response: cloud_redis.ListInstancesResponse) """ return response - async def post_list_instances_with_metadata(self, response: cloud_redis.ListInstancesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_list_instances_with_metadata( + self, + response: cloud_redis.ListInstancesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Post-rpc interceptor for list_instances Override in a subclass to read or manipulate the response or metadata after it @@ -473,7 +569,14 @@ async def post_list_instances_with_metadata(self, response: cloud_redis.ListInst """ return response, metadata - async def pre_reschedule_maintenance(self, request: cloud_redis.RescheduleMaintenanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.RescheduleMaintenanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + async def pre_reschedule_maintenance( + self, + request: cloud_redis.RescheduleMaintenanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.RescheduleMaintenanceRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for reschedule_maintenance Override in a subclass to manipulate the request or metadata @@ -481,7 +584,9 @@ async def pre_reschedule_maintenance(self, request: cloud_redis.RescheduleMainte """ return request, metadata - async def post_reschedule_maintenance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + async def post_reschedule_maintenance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for reschedule_maintenance DEPRECATED. 
Please use the `post_reschedule_maintenance_with_metadata` @@ -494,7 +599,11 @@ async def post_reschedule_maintenance(self, response: operations_pb2.Operation) """ return response - async def post_reschedule_maintenance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_reschedule_maintenance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for reschedule_maintenance Override in a subclass to read or manipulate the response or metadata after it @@ -509,7 +618,13 @@ async def post_reschedule_maintenance_with_metadata(self, response: operations_p """ return response, metadata - async def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + async def pre_update_instance( + self, + request: cloud_redis.UpdateInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for update_instance Override in a subclass to manipulate the request or metadata @@ -517,7 +632,9 @@ async def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, """ return request, metadata - async def post_update_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + async def post_update_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for update_instance DEPRECATED. 
Please use the `post_update_instance_with_metadata` @@ -530,7 +647,11 @@ async def post_update_instance(self, response: operations_pb2.Operation) -> oper """ return response - async def post_update_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_update_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for update_instance Override in a subclass to read or manipulate the response or metadata after it @@ -545,7 +666,13 @@ async def post_update_instance_with_metadata(self, response: operations_pb2.Oper """ return response, metadata - async def pre_upgrade_instance(self, request: cloud_redis.UpgradeInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.UpgradeInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + async def pre_upgrade_instance( + self, + request: cloud_redis.UpgradeInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.UpgradeInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for upgrade_instance Override in a subclass to manipulate the request or metadata @@ -553,7 +680,9 @@ async def pre_upgrade_instance(self, request: cloud_redis.UpgradeInstanceRequest """ return request, metadata - async def post_upgrade_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + async def post_upgrade_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for upgrade_instance DEPRECATED. 
Please use the `post_upgrade_instance_with_metadata` @@ -566,7 +695,11 @@ async def post_upgrade_instance(self, response: operations_pb2.Operation) -> ope """ return response - async def post_upgrade_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_upgrade_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for upgrade_instance Override in a subclass to read or manipulate the response or metadata after it @@ -582,8 +715,12 @@ async def post_upgrade_instance_with_metadata(self, response: operations_pb2.Ope return response, metadata async def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_location Override in a subclass to manipulate the request or metadata @@ -603,8 +740,12 @@ async def post_get_location( return response async def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_locations Override in a subclass to manipulate the request or metadata @@ 
-624,8 +765,12 @@ async def post_list_locations( return response async def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for cancel_operation Override in a subclass to manipulate the request or metadata @@ -633,9 +778,7 @@ async def pre_cancel_operation( """ return request, metadata - async def post_cancel_operation( - self, response: None - ) -> None: + async def post_cancel_operation(self, response: None) -> None: """Post-rpc interceptor for cancel_operation Override in a subclass to manipulate the response @@ -645,8 +788,12 @@ async def post_cancel_operation( return response async def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_operation Override in a subclass to manipulate the request or metadata @@ -654,9 +801,7 @@ async def pre_delete_operation( """ return request, metadata - async def post_delete_operation( - self, response: None - ) -> None: + async def post_delete_operation(self, response: None) -> None: """Post-rpc interceptor for delete_operation Override in a subclass to manipulate the response @@ -666,8 +811,12 @@ async def post_delete_operation( return response async def pre_get_operation( - self, request: 
operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata @@ -687,8 +836,12 @@ async def post_get_operation( return response async def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_operations Override in a subclass to manipulate the request or metadata @@ -708,8 +861,12 @@ async def post_list_operations( return response async def pre_wait_operation( - self, request: operations_pb2.WaitOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.WaitOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: operations_pb2.WaitOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.WaitOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for wait_operation Override in a subclass to manipulate the request or metadata @@ -735,6 +892,7 @@ class AsyncCloudRedisRestStub: _host: str _interceptor: AsyncCloudRedisRestInterceptor + class AsyncCloudRedisRestTransport(_BaseCloudRedisRestTransport): """Asynchronous REST backend transport for CloudRedis. 
@@ -766,38 +924,40 @@ class AsyncCloudRedisRestTransport(_BaseCloudRedisRestTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, - *, - host: str = 'redis.googleapis.com', - credentials: Optional[ga_credentials_async.Credentials] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - url_scheme: str = 'https', - interceptor: Optional[AsyncCloudRedisRestInterceptor] = None, - ) -> None: + + def __init__( + self, + *, + host: str = "redis.googleapis.com", + credentials: Optional[ga_credentials_async.Credentials] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + url_scheme: str = "https", + interceptor: Optional[AsyncCloudRedisRestInterceptor] = None, + ) -> None: """Instantiate the transport. - NOTE: This async REST transport functionality is currently in a beta - state (preview). We welcome your feedback via a GitHub issue in - this library's repository. Thank you! - - Args: - host (Optional[str]): - The hostname to connect to (default: 'redis.googleapis.com'). - credentials (Optional[google.auth.aio.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - url_scheme (str): the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - interceptor (Optional[AsyncCloudRedisRestInterceptor]): Interceptor used - to manipulate requests, request metadata, and responses. 
+ NOTE: This async REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to (default: 'redis.googleapis.com'). + credentials (Optional[google.auth.aio.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + url_scheme (str): the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + interceptor (Optional[AsyncCloudRedisRestInterceptor]): Interceptor used + to manipulate requests, request metadata, and responses. 
""" # Run the base constructor super().__init__( @@ -806,16 +966,18 @@ def __init__(self, client_info=client_info, always_use_jwt_access=False, url_scheme=url_scheme, - api_audience=None + api_audience=None, ) self._session = AsyncAuthorizedSession(self._credentials) # type: ignore self._interceptor = interceptor or AsyncCloudRedisRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { self.list_instances: self._wrap_method( self.list_instances, @@ -914,7 +1076,9 @@ def _wrap_method(self, func, *args, **kwargs): kwargs["kind"] = self.kind return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - class _CreateInstance(_BaseCloudRedisRestTransport._BaseCreateInstance, AsyncCloudRedisRestStub): + class _CreateInstance( + _BaseCloudRedisRestTransport._BaseCreateInstance, AsyncCloudRedisRestStub + ): def __hash__(self): return hash("AsyncCloudRedisRestTransport.CreateInstance") @@ -926,27 +1090,29 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - async def 
__call__(self, - request: cloud_redis.CreateInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + async def __call__( + self, + request: cloud_redis.CreateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the create instance method over HTTP. Args: @@ -969,32 +1135,50 @@ async def __call__(self, """ - http_options = _BaseCloudRedisRestTransport._BaseCreateInstance._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseCreateInstance._get_http_options() + ) - request, metadata = await self._interceptor.pre_create_instance(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseCreateInstance._get_transcoded_request(http_options, request) + request, metadata = await self._interceptor.pre_create_instance( + request, metadata + ) + transcoded_request = _BaseCloudRedisRestTransport._BaseCreateInstance._get_transcoded_request( + http_options, request + ) - body = _BaseCloudRedisRestTransport._BaseCreateInstance._get_request_body_json(transcoded_request) + body = ( + _BaseCloudRedisRestTransport._BaseCreateInstance._get_request_body_json( + transcoded_request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseCreateInstance._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseCreateInstance._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = 
"{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.CreateInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "CreateInstance", "httpRequest": http_request, @@ -1003,16 +1187,28 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._CreateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = await AsyncCloudRedisRestTransport._CreateInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + raise core_exceptions.format_http_response_error( + response, method, request_url, payload + ) # type: ignore # Return the response resp = operations_pb2.Operation() @@ -1021,20 +1217,24 @@ async def __call__(self, json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_create_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = await self._interceptor.post_create_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = await self._interceptor.post_create_instance_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.create_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "CreateInstance", "metadata": http_response["headers"], @@ -1044,7 +1244,9 @@ async def __call__(self, return resp - class _DeleteInstance(_BaseCloudRedisRestTransport._BaseDeleteInstance, 
AsyncCloudRedisRestStub): + class _DeleteInstance( + _BaseCloudRedisRestTransport._BaseDeleteInstance, AsyncCloudRedisRestStub + ): def __hash__(self): return hash("AsyncCloudRedisRestTransport.DeleteInstance") @@ -1056,26 +1258,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: cloud_redis.DeleteInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + async def __call__( + self, + request: cloud_redis.DeleteInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the delete instance method over HTTP. 
Args: @@ -1098,30 +1302,44 @@ async def __call__(self, """ - http_options = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseDeleteInstance._get_http_options() + ) - request, metadata = await self._interceptor.pre_delete_instance(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_transcoded_request(http_options, request) + request, metadata = await self._interceptor.pre_delete_instance( + request, metadata + ) + transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseDeleteInstance._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "DeleteInstance", "httpRequest": http_request, @@ -1130,16 +1348,27 @@ async def __call__(self, ) # Send the request - response 
= await AsyncCloudRedisRestTransport._DeleteInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = await AsyncCloudRedisRestTransport._DeleteInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + raise core_exceptions.format_http_response_error( + response, method, request_url, payload + ) # type: ignore # Return the response resp = operations_pb2.Operation() @@ -1148,20 +1377,24 @@ async def __call__(self, json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_delete_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = await self._interceptor.post_delete_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = await self._interceptor.post_delete_instance_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + 
"status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.delete_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "DeleteInstance", "metadata": http_response["headers"], @@ -1171,7 +1404,9 @@ async def __call__(self, return resp - class _ExportInstance(_BaseCloudRedisRestTransport._BaseExportInstance, AsyncCloudRedisRestStub): + class _ExportInstance( + _BaseCloudRedisRestTransport._BaseExportInstance, AsyncCloudRedisRestStub + ): def __hash__(self): return hash("AsyncCloudRedisRestTransport.ExportInstance") @@ -1183,27 +1418,29 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - async def __call__(self, - request: cloud_redis.ExportInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + async def __call__( + self, + request: cloud_redis.ExportInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the export instance method over HTTP. 
Args: @@ -1226,32 +1463,50 @@ async def __call__(self, """ - http_options = _BaseCloudRedisRestTransport._BaseExportInstance._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseExportInstance._get_http_options() + ) - request, metadata = await self._interceptor.pre_export_instance(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseExportInstance._get_transcoded_request(http_options, request) + request, metadata = await self._interceptor.pre_export_instance( + request, metadata + ) + transcoded_request = _BaseCloudRedisRestTransport._BaseExportInstance._get_transcoded_request( + http_options, request + ) - body = _BaseCloudRedisRestTransport._BaseExportInstance._get_request_body_json(transcoded_request) + body = ( + _BaseCloudRedisRestTransport._BaseExportInstance._get_request_body_json( + transcoded_request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseExportInstance._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseExportInstance._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for 
google.cloud.redis_v1.CloudRedisClient.ExportInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ExportInstance", "httpRequest": http_request, @@ -1260,16 +1515,28 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._ExportInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = await AsyncCloudRedisRestTransport._ExportInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + raise core_exceptions.format_http_response_error( + response, method, request_url, payload + ) # type: ignore # Return the response resp = operations_pb2.Operation() @@ -1278,20 +1545,24 @@ async def __call__(self, json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_export_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = await self._interceptor.post_export_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = await self._interceptor.post_export_instance_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): 
# pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.export_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ExportInstance", "metadata": http_response["headers"], @@ -1301,7 +1572,9 @@ async def __call__(self, return resp - class _FailoverInstance(_BaseCloudRedisRestTransport._BaseFailoverInstance, AsyncCloudRedisRestStub): + class _FailoverInstance( + _BaseCloudRedisRestTransport._BaseFailoverInstance, AsyncCloudRedisRestStub + ): def __hash__(self): return hash("AsyncCloudRedisRestTransport.FailoverInstance") @@ -1313,27 +1586,29 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - async def __call__(self, - request: cloud_redis.FailoverInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + async def __call__( + self, + request: cloud_redis.FailoverInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, 
bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the failover instance method over HTTP. Args: @@ -1356,32 +1631,46 @@ async def __call__(self, """ - http_options = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseFailoverInstance._get_http_options() + ) - request, metadata = await self._interceptor.pre_failover_instance(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_transcoded_request(http_options, request) + request, metadata = await self._interceptor.pre_failover_instance( + request, metadata + ) + transcoded_request = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_transcoded_request( + http_options, request + ) - body = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_request_body_json(transcoded_request) + body = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_query_params_json(transcoded_request) + query_params = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_query_params_json( + transcoded_request + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": 
dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.FailoverInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "FailoverInstance", "httpRequest": http_request, @@ -1390,16 +1679,30 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._FailoverInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = ( + await AsyncCloudRedisRestTransport._FailoverInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + raise core_exceptions.format_http_response_error( + response, method, request_url, payload + ) # type: ignore # Return the response resp = operations_pb2.Operation() @@ -1408,20 +1711,24 @@ async def __call__(self, json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_failover_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = await self._interceptor.post_failover_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = await self._interceptor.post_failover_instance_with_metadata( + resp, response_metadata + ) 
+ if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.failover_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "FailoverInstance", "metadata": http_response["headers"], @@ -1431,7 +1738,9 @@ async def __call__(self, return resp - class _GetInstance(_BaseCloudRedisRestTransport._BaseGetInstance, AsyncCloudRedisRestStub): + class _GetInstance( + _BaseCloudRedisRestTransport._BaseGetInstance, AsyncCloudRedisRestStub + ): def __hash__(self): return hash("AsyncCloudRedisRestTransport.GetInstance") @@ -1443,26 +1752,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: cloud_redis.GetInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> cloud_redis.Instance: + async def __call__( + self, + request: cloud_redis.GetInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.Instance: r"""Call the get instance method over HTTP. Args: @@ -1482,30 +1793,46 @@ async def __call__(self, A Memorystore for Redis instance. """ - http_options = _BaseCloudRedisRestTransport._BaseGetInstance._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseGetInstance._get_http_options() + ) - request, metadata = await self._interceptor.pre_get_instance(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseGetInstance._get_transcoded_request(http_options, request) + request, metadata = await self._interceptor.pre_get_instance( + request, metadata + ) + transcoded_request = ( + _BaseCloudRedisRestTransport._BaseGetInstance._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseGetInstance._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", 
"rpcName": "GetInstance", "httpRequest": http_request, @@ -1514,16 +1841,27 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._GetInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = await AsyncCloudRedisRestTransport._GetInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + raise core_exceptions.format_http_response_error( + response, method, request_url, payload + ) # type: ignore # Return the response resp = cloud_redis.Instance() @@ -1532,20 +1870,24 @@ async def __call__(self, json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_get_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = await self._interceptor.post_get_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = await self._interceptor.post_get_instance_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = cloud_redis.Instance.to_json(response) except: response_payload = None http_response = { "payload": response_payload, - 
"headers": dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.get_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetInstance", "metadata": http_response["headers"], @@ -1555,7 +1897,9 @@ async def __call__(self, return resp - class _GetInstanceAuthString(_BaseCloudRedisRestTransport._BaseGetInstanceAuthString, AsyncCloudRedisRestStub): + class _GetInstanceAuthString( + _BaseCloudRedisRestTransport._BaseGetInstanceAuthString, AsyncCloudRedisRestStub + ): def __hash__(self): return hash("AsyncCloudRedisRestTransport.GetInstanceAuthString") @@ -1567,26 +1911,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: cloud_redis.GetInstanceAuthStringRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> cloud_redis.InstanceAuthString: + async def __call__( + self, + request: cloud_redis.GetInstanceAuthStringRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.InstanceAuthString: r"""Call the get instance auth string method over HTTP. 
Args: @@ -1608,28 +1954,38 @@ async def __call__(self, http_options = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_http_options() - request, metadata = await self._interceptor.pre_get_instance_auth_string(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_transcoded_request(http_options, request) + request, metadata = await self._interceptor.pre_get_instance_auth_string( + request, metadata + ) + transcoded_request = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_query_params_json(transcoded_request) + query_params = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_query_params_json( + transcoded_request + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetInstanceAuthString", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetInstanceAuthString", "httpRequest": http_request, @@ -1638,16 +1994,29 @@ async def __call__(self, ) # Send the request - response = await 
AsyncCloudRedisRestTransport._GetInstanceAuthString._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = ( + await AsyncCloudRedisRestTransport._GetInstanceAuthString._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + raise core_exceptions.format_http_response_error( + response, method, request_url, payload + ) # type: ignore # Return the response resp = cloud_redis.InstanceAuthString() @@ -1656,20 +2025,27 @@ async def __call__(self, json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_get_instance_auth_string(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = await self._interceptor.post_get_instance_auth_string_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + ( + resp, + _, + ) = await self._interceptor.post_get_instance_auth_string_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = cloud_redis.InstanceAuthString.to_json(response) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), - "status": 
"OK", # need to obtain this properly + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.get_instance_auth_string", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetInstanceAuthString", "metadata": http_response["headers"], @@ -1679,7 +2055,9 @@ async def __call__(self, return resp - class _ImportInstance(_BaseCloudRedisRestTransport._BaseImportInstance, AsyncCloudRedisRestStub): + class _ImportInstance( + _BaseCloudRedisRestTransport._BaseImportInstance, AsyncCloudRedisRestStub + ): def __hash__(self): return hash("AsyncCloudRedisRestTransport.ImportInstance") @@ -1691,27 +2069,29 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - async def __call__(self, - request: cloud_redis.ImportInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + async def __call__( + self, + request: cloud_redis.ImportInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the import instance method over HTTP. 
Args: @@ -1734,32 +2114,50 @@ async def __call__(self, """ - http_options = _BaseCloudRedisRestTransport._BaseImportInstance._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseImportInstance._get_http_options() + ) - request, metadata = await self._interceptor.pre_import_instance(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseImportInstance._get_transcoded_request(http_options, request) + request, metadata = await self._interceptor.pre_import_instance( + request, metadata + ) + transcoded_request = _BaseCloudRedisRestTransport._BaseImportInstance._get_transcoded_request( + http_options, request + ) - body = _BaseCloudRedisRestTransport._BaseImportInstance._get_request_body_json(transcoded_request) + body = ( + _BaseCloudRedisRestTransport._BaseImportInstance._get_request_body_json( + transcoded_request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseImportInstance._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseImportInstance._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for 
google.cloud.redis_v1.CloudRedisClient.ImportInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ImportInstance", "httpRequest": http_request, @@ -1768,16 +2166,28 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._ImportInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = await AsyncCloudRedisRestTransport._ImportInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + raise core_exceptions.format_http_response_error( + response, method, request_url, payload + ) # type: ignore # Return the response resp = operations_pb2.Operation() @@ -1786,20 +2196,24 @@ async def __call__(self, json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_import_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = await self._interceptor.post_import_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = await self._interceptor.post_import_instance_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): 
# pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.import_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ImportInstance", "metadata": http_response["headers"], @@ -1809,7 +2223,9 @@ async def __call__(self, return resp - class _ListInstances(_BaseCloudRedisRestTransport._BaseListInstances, AsyncCloudRedisRestStub): + class _ListInstances( + _BaseCloudRedisRestTransport._BaseListInstances, AsyncCloudRedisRestStub + ): def __hash__(self): return hash("AsyncCloudRedisRestTransport.ListInstances") @@ -1821,26 +2237,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: cloud_redis.ListInstancesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> cloud_redis.ListInstancesResponse: + async def __call__( + self, + request: cloud_redis.ListInstancesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> 
cloud_redis.ListInstancesResponse: r"""Call the list instances method over HTTP. Args: @@ -1862,30 +2280,46 @@ async def __call__(self, """ - http_options = _BaseCloudRedisRestTransport._BaseListInstances._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseListInstances._get_http_options() + ) - request, metadata = await self._interceptor.pre_list_instances(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseListInstances._get_transcoded_request(http_options, request) + request, metadata = await self._interceptor.pre_list_instances( + request, metadata + ) + transcoded_request = ( + _BaseCloudRedisRestTransport._BaseListInstances._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseListInstances._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseListInstances._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListInstances", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListInstances", "httpRequest": http_request, @@ 
-1894,16 +2328,27 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._ListInstances._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = await AsyncCloudRedisRestTransport._ListInstances._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + raise core_exceptions.format_http_response_error( + response, method, request_url, payload + ) # type: ignore # Return the response resp = cloud_redis.ListInstancesResponse() @@ -1912,20 +2357,26 @@ async def __call__(self, json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_list_instances(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = await self._interceptor.post_list_instances_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = await self._interceptor.post_list_instances_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = cloud_redis.ListInstancesResponse.to_json(response) + response_payload = cloud_redis.ListInstancesResponse.to_json( + response + ) except: 
response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.list_instances", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListInstances", "metadata": http_response["headers"], @@ -1935,7 +2386,9 @@ async def __call__(self, return resp - class _RescheduleMaintenance(_BaseCloudRedisRestTransport._BaseRescheduleMaintenance, AsyncCloudRedisRestStub): + class _RescheduleMaintenance( + _BaseCloudRedisRestTransport._BaseRescheduleMaintenance, AsyncCloudRedisRestStub + ): def __hash__(self): return hash("AsyncCloudRedisRestTransport.RescheduleMaintenance") @@ -1947,27 +2400,29 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - async def __call__(self, - request: cloud_redis.RescheduleMaintenanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + async def __call__( + self, + request: cloud_redis.RescheduleMaintenanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call 
the reschedule maintenance method over HTTP. Args: @@ -1992,30 +2447,42 @@ async def __call__(self, http_options = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_http_options() - request, metadata = await self._interceptor.pre_reschedule_maintenance(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_transcoded_request(http_options, request) + request, metadata = await self._interceptor.pre_reschedule_maintenance( + request, metadata + ) + transcoded_request = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_transcoded_request( + http_options, request + ) - body = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_request_body_json(transcoded_request) + body = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_query_params_json(transcoded_request) + query_params = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_query_params_json( + transcoded_request + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for 
google.cloud.redis_v1.CloudRedisClient.RescheduleMaintenance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "RescheduleMaintenance", "httpRequest": http_request, @@ -2024,16 +2491,30 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._RescheduleMaintenance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = ( + await AsyncCloudRedisRestTransport._RescheduleMaintenance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + raise core_exceptions.format_http_response_error( + response, method, request_url, payload + ) # type: ignore # Return the response resp = operations_pb2.Operation() @@ -2042,20 +2523,24 @@ async def __call__(self, json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_reschedule_maintenance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = await self._interceptor.post_reschedule_maintenance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = await self._interceptor.post_reschedule_maintenance_with_metadata( + resp, response_metadata + ) + if 
CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.reschedule_maintenance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "RescheduleMaintenance", "metadata": http_response["headers"], @@ -2065,7 +2550,9 @@ async def __call__(self, return resp - class _UpdateInstance(_BaseCloudRedisRestTransport._BaseUpdateInstance, AsyncCloudRedisRestStub): + class _UpdateInstance( + _BaseCloudRedisRestTransport._BaseUpdateInstance, AsyncCloudRedisRestStub + ): def __hash__(self): return hash("AsyncCloudRedisRestTransport.UpdateInstance") @@ -2077,27 +2564,29 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - async def __call__(self, - request: cloud_redis.UpdateInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + async def __call__( + self, + request: cloud_redis.UpdateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: 
Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the update instance method over HTTP. Args: @@ -2120,32 +2609,50 @@ async def __call__(self, """ - http_options = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseUpdateInstance._get_http_options() + ) - request, metadata = await self._interceptor.pre_update_instance(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_transcoded_request(http_options, request) + request, metadata = await self._interceptor.pre_update_instance( + request, metadata + ) + transcoded_request = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_transcoded_request( + http_options, request + ) - body = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_request_body_json(transcoded_request) + body = ( + _BaseCloudRedisRestTransport._BaseUpdateInstance._get_request_body_json( + transcoded_request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseUpdateInstance._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + 
"requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.UpdateInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "UpdateInstance", "httpRequest": http_request, @@ -2154,16 +2661,28 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._UpdateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = await AsyncCloudRedisRestTransport._UpdateInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + raise core_exceptions.format_http_response_error( + response, method, request_url, payload + ) # type: ignore # Return the response resp = operations_pb2.Operation() @@ -2172,20 +2691,24 @@ async def __call__(self, json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_update_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = await self._interceptor.post_update_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = await 
self._interceptor.post_update_instance_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.update_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "UpdateInstance", "metadata": http_response["headers"], @@ -2195,7 +2718,9 @@ async def __call__(self, return resp - class _UpgradeInstance(_BaseCloudRedisRestTransport._BaseUpgradeInstance, AsyncCloudRedisRestStub): + class _UpgradeInstance( + _BaseCloudRedisRestTransport._BaseUpgradeInstance, AsyncCloudRedisRestStub + ): def __hash__(self): return hash("AsyncCloudRedisRestTransport.UpgradeInstance") @@ -2207,27 +2732,29 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - async def __call__(self, - request: cloud_redis.UpgradeInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + async def __call__( + self, + request: 
cloud_redis.UpgradeInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the upgrade instance method over HTTP. Args: @@ -2250,32 +2777,46 @@ async def __call__(self, """ - http_options = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_http_options() + ) - request, metadata = await self._interceptor.pre_upgrade_instance(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_transcoded_request(http_options, request) + request, metadata = await self._interceptor.pre_upgrade_instance( + request, metadata + ) + transcoded_request = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_transcoded_request( + http_options, request + ) - body = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_request_body_json(transcoded_request) + body = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_query_params_json(transcoded_request) + query_params = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_query_params_json( + transcoded_request + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - 
"requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.UpgradeInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "UpgradeInstance", "httpRequest": http_request, @@ -2284,16 +2825,30 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._UpgradeInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = ( + await AsyncCloudRedisRestTransport._UpgradeInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + raise core_exceptions.format_http_response_error( + response, method, request_url, payload + ) # type: ignore # Return the response resp = operations_pb2.Operation() @@ -2302,20 +2857,24 @@ async def __call__(self, json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_upgrade_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = await self._interceptor.post_upgrade_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = await self._interceptor.post_upgrade_instance_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.upgrade_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "UpgradeInstance", "metadata": http_response["headers"], @@ -2335,123 +2894,131 @@ def operations_client(self) -> AsyncOperationsRestClient: # Only create a new client if we do not already have one. if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ + "google.longrunning.Operations.CancelOperation": [ { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", }, ], - 'google.longrunning.Operations.DeleteOperation': [ + "google.longrunning.Operations.DeleteOperation": [ { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", }, ], - 'google.longrunning.Operations.GetOperation': [ + "google.longrunning.Operations.GetOperation": [ { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", }, ], - 'google.longrunning.Operations.ListOperations': [ + "google.longrunning.Operations.ListOperations": [ { - 'method': 'get', - 'uri': 
'/v1/{name=projects/*/locations/*}/operations', + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", }, ], - 'google.longrunning.Operations.WaitOperation': [ + "google.longrunning.Operations.WaitOperation": [ { - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/operations/*}:wait', - 'body': '*', + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/operations/*}:wait", + "body": "*", }, ], } rest_transport = operations_v1.AsyncOperationsRestTransport( # type: ignore - host=self._host, - # use the credentials which are saved - credentials=self._credentials, # type: ignore - http_options=http_options, - path_prefix="v1" + host=self._host, + # use the credentials which are saved + credentials=self._credentials, # type: ignore + http_options=http_options, + path_prefix="v1", ) - self._operations_client = AsyncOperationsRestClient(transport=rest_transport) + self._operations_client = AsyncOperationsRestClient( + transport=rest_transport + ) # Return the client from cache. 
return self._operations_client @property - def create_instance(self) -> Callable[ - [cloud_redis.CreateInstanceRequest], - operations_pb2.Operation]: + def create_instance( + self, + ) -> Callable[[cloud_redis.CreateInstanceRequest], operations_pb2.Operation]: return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore @property - def delete_instance(self) -> Callable[ - [cloud_redis.DeleteInstanceRequest], - operations_pb2.Operation]: + def delete_instance( + self, + ) -> Callable[[cloud_redis.DeleteInstanceRequest], operations_pb2.Operation]: return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore @property - def export_instance(self) -> Callable[ - [cloud_redis.ExportInstanceRequest], - operations_pb2.Operation]: + def export_instance( + self, + ) -> Callable[[cloud_redis.ExportInstanceRequest], operations_pb2.Operation]: return self._ExportInstance(self._session, self._host, self._interceptor) # type: ignore @property - def failover_instance(self) -> Callable[ - [cloud_redis.FailoverInstanceRequest], - operations_pb2.Operation]: + def failover_instance( + self, + ) -> Callable[[cloud_redis.FailoverInstanceRequest], operations_pb2.Operation]: return self._FailoverInstance(self._session, self._host, self._interceptor) # type: ignore @property - def get_instance(self) -> Callable[ - [cloud_redis.GetInstanceRequest], - cloud_redis.Instance]: + def get_instance( + self, + ) -> Callable[[cloud_redis.GetInstanceRequest], cloud_redis.Instance]: return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore @property - def get_instance_auth_string(self) -> Callable[ - [cloud_redis.GetInstanceAuthStringRequest], - cloud_redis.InstanceAuthString]: + def get_instance_auth_string( + self, + ) -> Callable[ + [cloud_redis.GetInstanceAuthStringRequest], cloud_redis.InstanceAuthString + ]: return self._GetInstanceAuthString(self._session, self._host, self._interceptor) # type: ignore @property 
- def import_instance(self) -> Callable[ - [cloud_redis.ImportInstanceRequest], - operations_pb2.Operation]: + def import_instance( + self, + ) -> Callable[[cloud_redis.ImportInstanceRequest], operations_pb2.Operation]: return self._ImportInstance(self._session, self._host, self._interceptor) # type: ignore @property - def list_instances(self) -> Callable[ - [cloud_redis.ListInstancesRequest], - cloud_redis.ListInstancesResponse]: + def list_instances( + self, + ) -> Callable[ + [cloud_redis.ListInstancesRequest], cloud_redis.ListInstancesResponse + ]: return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore @property - def reschedule_maintenance(self) -> Callable[ - [cloud_redis.RescheduleMaintenanceRequest], - operations_pb2.Operation]: + def reschedule_maintenance( + self, + ) -> Callable[[cloud_redis.RescheduleMaintenanceRequest], operations_pb2.Operation]: return self._RescheduleMaintenance(self._session, self._host, self._interceptor) # type: ignore @property - def update_instance(self) -> Callable[ - [cloud_redis.UpdateInstanceRequest], - operations_pb2.Operation]: + def update_instance( + self, + ) -> Callable[[cloud_redis.UpdateInstanceRequest], operations_pb2.Operation]: return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore @property - def upgrade_instance(self) -> Callable[ - [cloud_redis.UpgradeInstanceRequest], - operations_pb2.Operation]: + def upgrade_instance( + self, + ) -> Callable[[cloud_redis.UpgradeInstanceRequest], operations_pb2.Operation]: return self._UpgradeInstance(self._session, self._host, self._interceptor) # type: ignore @property def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - class _GetLocation(_BaseCloudRedisRestTransport._BaseGetLocation, AsyncCloudRedisRestStub): + class _GetLocation( + 
_BaseCloudRedisRestTransport._BaseGetLocation, AsyncCloudRedisRestStub + ): def __hash__(self): return hash("AsyncCloudRedisRestTransport.GetLocation") @@ -2463,27 +3030,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.Location: - + async def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: r"""Call the get location method over HTTP. Args: @@ -2501,30 +3069,46 @@ async def __call__(self, locations_pb2.Location: Response from GetLocation method. 
""" - http_options = _BaseCloudRedisRestTransport._BaseGetLocation._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseGetLocation._get_http_options() + ) - request, metadata = await self._interceptor.pre_get_location(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) + request, metadata = await self._interceptor.pre_get_location( + request, metadata + ) + transcoded_request = ( + _BaseCloudRedisRestTransport._BaseGetLocation._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseGetLocation._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetLocation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetLocation", "httpRequest": http_request, @@ -2533,34 +3117,47 @@ async def __call__(self, ) # Send the request - response = await 
AsyncCloudRedisRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = await AsyncCloudRedisRestTransport._GetLocation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + raise core_exceptions.format_http_response_error( + response, method, request_url, payload + ) # type: ignore content = await response.read() resp = locations_pb2.Location() resp = json_format.Parse(content, resp) resp = await self._interceptor.post_get_location(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.GetLocation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetLocation", "httpResponse": http_response, @@ -2571,9 +3168,11 @@ async def __call__(self, @property def list_locations(self): - return self._ListLocations(self._session, self._host, 
self._interceptor) # type: ignore + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - class _ListLocations(_BaseCloudRedisRestTransport._BaseListLocations, AsyncCloudRedisRestStub): + class _ListLocations( + _BaseCloudRedisRestTransport._BaseListLocations, AsyncCloudRedisRestStub + ): def __hash__(self): return hash("AsyncCloudRedisRestTransport.ListLocations") @@ -2585,27 +3184,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.ListLocationsResponse: - + async def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: r"""Call the list locations method over HTTP. Args: @@ -2623,30 +3223,46 @@ async def __call__(self, locations_pb2.ListLocationsResponse: Response from ListLocations method. 
""" - http_options = _BaseCloudRedisRestTransport._BaseListLocations._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseListLocations._get_http_options() + ) - request, metadata = await self._interceptor.pre_list_locations(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseListLocations._get_transcoded_request(http_options, request) + request, metadata = await self._interceptor.pre_list_locations( + request, metadata + ) + transcoded_request = ( + _BaseCloudRedisRestTransport._BaseListLocations._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseListLocations._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseListLocations._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListLocations", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListLocations", "httpRequest": http_request, @@ -2655,34 +3271,47 @@ async def __call__(self, ) # Send the request - response = await 
AsyncCloudRedisRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = await AsyncCloudRedisRestTransport._ListLocations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + raise core_exceptions.format_http_response_error( + response, method, request_url, payload + ) # type: ignore content = await response.read() resp = locations_pb2.ListLocationsResponse() resp = json_format.Parse(content, resp) resp = await self._interceptor.post_list_locations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.ListLocations", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListLocations", "httpResponse": http_response, @@ -2693,9 +3322,11 @@ async def __call__(self, @property def cancel_operation(self): - return 
self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - class _CancelOperation(_BaseCloudRedisRestTransport._BaseCancelOperation, AsyncCloudRedisRestStub): + class _CancelOperation( + _BaseCloudRedisRestTransport._BaseCancelOperation, AsyncCloudRedisRestStub + ): def __hash__(self): return hash("AsyncCloudRedisRestTransport.CancelOperation") @@ -2707,27 +3338,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - + async def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Call the cancel operation method over HTTP. Args: @@ -2742,30 +3374,42 @@ async def __call__(self, be of type `bytes`. 
""" - http_options = _BaseCloudRedisRestTransport._BaseCancelOperation._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseCancelOperation._get_http_options() + ) - request, metadata = await self._interceptor.pre_cancel_operation(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) + request, metadata = await self._interceptor.pre_cancel_operation( + request, metadata + ) + transcoded_request = _BaseCloudRedisRestTransport._BaseCancelOperation._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + query_params = _BaseCloudRedisRestTransport._BaseCancelOperation._get_query_params_json( + transcoded_request + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.CancelOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "CancelOperation", "httpRequest": http_request, @@ -2774,24 +3418,39 @@ async def __call__(self, ) # Send the request - response = await 
AsyncCloudRedisRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = ( + await AsyncCloudRedisRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + raise core_exceptions.format_http_response_error( + response, method, request_url, payload + ) # type: ignore return await self._interceptor.post_cancel_operation(None) @property def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - class _DeleteOperation(_BaseCloudRedisRestTransport._BaseDeleteOperation, AsyncCloudRedisRestStub): + class _DeleteOperation( + _BaseCloudRedisRestTransport._BaseDeleteOperation, AsyncCloudRedisRestStub + ): def __hash__(self): return hash("AsyncCloudRedisRestTransport.DeleteOperation") @@ -2803,27 +3462,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = 
"application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - + async def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Call the delete operation method over HTTP. Args: @@ -2838,30 +3498,42 @@ async def __call__(self, be of type `bytes`. """ - http_options = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseDeleteOperation._get_http_options() + ) - request, metadata = await self._interceptor.pre_delete_operation(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) + request, metadata = await self._interceptor.pre_delete_operation( + request, metadata + ) + transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + query_params = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_query_params_json( + transcoded_request + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO 
COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "DeleteOperation", "httpRequest": http_request, @@ -2870,24 +3542,39 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = ( + await AsyncCloudRedisRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + raise core_exceptions.format_http_response_error( + response, method, request_url, payload + ) # type: ignore return await self._interceptor.post_delete_operation(None) @property def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - class _GetOperation(_BaseCloudRedisRestTransport._BaseGetOperation, AsyncCloudRedisRestStub): + class _GetOperation( + _BaseCloudRedisRestTransport._BaseGetOperation, AsyncCloudRedisRestStub + ): def __hash__(self): return hash("AsyncCloudRedisRestTransport.GetOperation") @@ -2899,27 +3586,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) 
-> operations_pb2.Operation: - + async def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. Args: @@ -2937,30 +3625,46 @@ async def __call__(self, operations_pb2.Operation: Response from GetOperation method. """ - http_options = _BaseCloudRedisRestTransport._BaseGetOperation._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseGetOperation._get_http_options() + ) - request, metadata = await self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + request, metadata = await self._interceptor.pre_get_operation( + request, metadata + ) + transcoded_request = ( + _BaseCloudRedisRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + 
"requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetOperation", "httpRequest": http_request, @@ -2969,34 +3673,47 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = await AsyncCloudRedisRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + raise core_exceptions.format_http_response_error( + response, method, request_url, payload + ) # type: ignore content = await response.read() resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = await self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), 
"status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.GetOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetOperation", "httpResponse": http_response, @@ -3007,9 +3724,11 @@ async def __call__(self, @property def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - class _ListOperations(_BaseCloudRedisRestTransport._BaseListOperations, AsyncCloudRedisRestStub): + class _ListOperations( + _BaseCloudRedisRestTransport._BaseListOperations, AsyncCloudRedisRestStub + ): def __hash__(self): return hash("AsyncCloudRedisRestTransport.ListOperations") @@ -3021,27 +3740,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - + async def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: r"""Call the list operations method over HTTP. 
Args: @@ -3059,30 +3779,44 @@ async def __call__(self, operations_pb2.ListOperationsResponse: Response from ListOperations method. """ - http_options = _BaseCloudRedisRestTransport._BaseListOperations._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseListOperations._get_http_options() + ) - request, metadata = await self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + request, metadata = await self._interceptor.pre_list_operations( + request, metadata + ) + transcoded_request = _BaseCloudRedisRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListOperations", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListOperations", "httpRequest": http_request, 
@@ -3091,34 +3825,47 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = await AsyncCloudRedisRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + raise core_exceptions.format_http_response_error( + response, method, request_url, payload + ) # type: ignore content = await response.read() resp = operations_pb2.ListOperationsResponse() resp = json_format.Parse(content, resp) resp = await self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.ListOperations", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListOperations", "httpResponse": http_response, @@ -3129,9 +3876,11 @@ 
async def __call__(self, @property def wait_operation(self): - return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore + return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore - class _WaitOperation(_BaseCloudRedisRestTransport._BaseWaitOperation, AsyncCloudRedisRestStub): + class _WaitOperation( + _BaseCloudRedisRestTransport._BaseWaitOperation, AsyncCloudRedisRestStub + ): def __hash__(self): return hash("AsyncCloudRedisRestTransport.WaitOperation") @@ -3143,28 +3892,29 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - async def __call__(self, - request: operations_pb2.WaitOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - + async def __call__( + self, + request: operations_pb2.WaitOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the wait operation method over HTTP. Args: @@ -3182,32 +3932,52 @@ async def __call__(self, operations_pb2.Operation: Response from WaitOperation method. 
""" - http_options = _BaseCloudRedisRestTransport._BaseWaitOperation._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseWaitOperation._get_http_options() + ) - request, metadata = await self._interceptor.pre_wait_operation(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseWaitOperation._get_transcoded_request(http_options, request) + request, metadata = await self._interceptor.pre_wait_operation( + request, metadata + ) + transcoded_request = ( + _BaseCloudRedisRestTransport._BaseWaitOperation._get_transcoded_request( + http_options, request + ) + ) - body = _BaseCloudRedisRestTransport._BaseWaitOperation._get_request_body_json(transcoded_request) + body = ( + _BaseCloudRedisRestTransport._BaseWaitOperation._get_request_body_json( + transcoded_request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseWaitOperation._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseWaitOperation._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.WaitOperation", - extra = { + extra={ "serviceName": 
"google.cloud.redis.v1.CloudRedis", "rpcName": "WaitOperation", "httpRequest": http_request, @@ -3216,34 +3986,48 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._WaitOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = await AsyncCloudRedisRestTransport._WaitOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + raise core_exceptions.format_http_response_error( + response, method, request_url, payload + ) # type: ignore content = await response.read() resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = await self._interceptor.post_wait_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.WaitOperation", - extra = { + extra={ "serviceName": 
"google.cloud.redis.v1.CloudRedis", "rpcName": "WaitOperation", "httpResponse": http_response, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py index 56df2832192a..4c41b5331cd1 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py @@ -14,19 +14,16 @@ # limitations under the License. # import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.cloud.location import locations_pb2 # type: ignore -from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO - import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - +from google.api_core import gapic_v1, path_template +from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.types import cloud_redis from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format + +from .base import DEFAULT_CLIENT_INFO, CloudRedisTransport class _BaseCloudRedisRestTransport(CloudRedisTransport): @@ -42,14 +39,16 @@ class _BaseCloudRedisRestTransport(CloudRedisTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, - host: str = 'redis.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "redis.googleapis.com", + credentials: 
Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): @@ -73,7 +72,9 @@ def __init__(self, *, # Run the base constructor maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER url_match_items = maybe_url_match.groupdict() @@ -84,27 +85,33 @@ def __init__(self, *, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience + api_audience=api_audience, ) class _BaseCreateInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "instanceId" : "", } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "instanceId": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/instances', - 'body': 'instance', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/instances", + "body": "instance", + }, ] return http_options @@ -119,17 +126,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], 
use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseCloudRedisRestTransport._BaseCreateInstance._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseCloudRedisRestTransport._BaseCreateInstance._get_unset_required_fields( + query_params + ) + ) return query_params @@ -137,19 +150,23 @@ class _BaseDeleteInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/instances/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/instances/*}", + }, ] return http_options @@ -161,11 +178,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseCloudRedisRestTransport._BaseDeleteInstance._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + 
query_params.update( + _BaseCloudRedisRestTransport._BaseDeleteInstance._get_unset_required_fields( + query_params + ) + ) return query_params @@ -173,20 +196,24 @@ class _BaseExportInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/instances/*}:export', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/instances/*}:export", + "body": "*", + }, ] return http_options @@ -201,17 +228,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseCloudRedisRestTransport._BaseExportInstance._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseCloudRedisRestTransport._BaseExportInstance._get_unset_required_fields( + query_params + ) + ) return query_params @@ -219,20 +252,24 @@ class _BaseFailoverInstance: def __hash__(self): # pragma: NO COVER return 
NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/instances/*}:failover', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/instances/*}:failover", + "body": "*", + }, ] return http_options @@ -247,17 +284,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseCloudRedisRestTransport._BaseFailoverInstance._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseCloudRedisRestTransport._BaseFailoverInstance._get_unset_required_fields( + query_params + ) + ) return query_params @@ -265,19 +308,23 @@ class _BaseGetInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for 
k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/instances/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/instances/*}", + }, ] return http_options @@ -289,11 +336,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseCloudRedisRestTransport._BaseGetInstance._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseCloudRedisRestTransport._BaseGetInstance._get_unset_required_fields( + query_params + ) + ) return query_params @@ -301,19 +354,23 @@ class _BaseGetInstanceAuthString: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/instances/*}/authString', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/instances/*}/authString", 
+ }, ] return http_options @@ -325,11 +382,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_unset_required_fields( + query_params + ) + ) return query_params @@ -337,20 +400,24 @@ class _BaseImportInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/instances/*}:import', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/instances/*}:import", + "body": "*", + }, ] return http_options @@ -365,17 +432,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - 
transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseCloudRedisRestTransport._BaseImportInstance._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseCloudRedisRestTransport._BaseImportInstance._get_unset_required_fields( + query_params + ) + ) return query_params @@ -383,19 +456,23 @@ class _BaseListInstances: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/instances', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/instances", + }, ] return http_options @@ -407,11 +484,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseCloudRedisRestTransport._BaseListInstances._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseCloudRedisRestTransport._BaseListInstances._get_unset_required_fields( + query_params + ) + ) return query_params @@ -419,20 +502,24 @@ class 
_BaseRescheduleMaintenance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/instances/*}:rescheduleMaintenance', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/instances/*}:rescheduleMaintenance", + "body": "*", + }, ] return http_options @@ -447,17 +534,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_unset_required_fields( + query_params + ) + ) return query_params @@ -465,20 +558,26 @@ class _BaseUpdateInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } + 
__REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{instance.name=projects/*/locations/*/instances/*}', - 'body': 'instance', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{instance.name=projects/*/locations/*/instances/*}", + "body": "instance", + }, ] return http_options @@ -493,17 +592,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseCloudRedisRestTransport._BaseUpdateInstance._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseCloudRedisRestTransport._BaseUpdateInstance._get_unset_required_fields( + query_params + ) + ) return query_params @@ -511,20 +616,24 @@ class _BaseUpgradeInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in 
message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/instances/*}:upgrade', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/instances/*}:upgrade", + "body": "*", + }, ] return http_options @@ -539,17 +648,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseCloudRedisRestTransport._BaseUpgradeInstance._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_unset_required_fields( + query_params + ) + ) return query_params @@ -559,23 +674,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request 
@staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseListLocations: @@ -584,23 +699,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*}/locations', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseCancelOperation: @@ -609,23 +724,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = 
json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseDeleteOperation: @@ -634,23 +749,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseGetOperation: @@ -659,23 +774,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = 
json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseListOperations: @@ -684,23 +799,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseWaitOperation: @@ -709,31 +824,30 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/operations/*}:wait', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/operations/*}:wait", + "body": "*", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) + body = json.dumps(transcoded_request["body"]) return body + @staticmethod def _get_query_params_json(transcoded_request): - 
query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params -__all__=( - '_BaseCloudRedisRestTransport', -) +__all__ = ("_BaseCloudRedisRestTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py index 4103832ed203..faaca194d726 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py @@ -44,31 +44,31 @@ ) __all__ = ( - 'CreateInstanceRequest', - 'DeleteInstanceRequest', - 'ExportInstanceRequest', - 'FailoverInstanceRequest', - 'GcsDestination', - 'GcsSource', - 'GetInstanceAuthStringRequest', - 'GetInstanceRequest', - 'ImportInstanceRequest', - 'InputConfig', - 'Instance', - 'InstanceAuthString', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'LocationMetadata', - 'MaintenancePolicy', - 'MaintenanceSchedule', - 'NodeInfo', - 'OperationMetadata', - 'OutputConfig', - 'PersistenceConfig', - 'RescheduleMaintenanceRequest', - 'TlsCertificate', - 'UpdateInstanceRequest', - 'UpgradeInstanceRequest', - 'WeeklyMaintenanceWindow', - 'ZoneMetadata', + "CreateInstanceRequest", + "DeleteInstanceRequest", + "ExportInstanceRequest", + "FailoverInstanceRequest", + "GcsDestination", + "GcsSource", + "GetInstanceAuthStringRequest", + "GetInstanceRequest", + "ImportInstanceRequest", + "InputConfig", + "Instance", + "InstanceAuthString", + "ListInstancesRequest", + "ListInstancesResponse", + "LocationMetadata", + "MaintenancePolicy", + "MaintenanceSchedule", + "NodeInfo", + "OperationMetadata", + "OutputConfig", + "PersistenceConfig", + "RescheduleMaintenanceRequest", + "TlsCertificate", + "UpdateInstanceRequest", + "UpgradeInstanceRequest", + 
"WeeklyMaintenanceWindow", + "ZoneMetadata", ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py index 5178d3ff14b2..29e0ee02c919 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py @@ -17,45 +17,43 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - import google.protobuf.duration_pb2 as duration_pb2 # type: ignore import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore import google.type.dayofweek_pb2 as dayofweek_pb2 # type: ignore import google.type.timeofday_pb2 as timeofday_pb2 # type: ignore - +import proto # type: ignore __protobuf__ = proto.module( - package='google.cloud.redis.v1', + package="google.cloud.redis.v1", manifest={ - 'NodeInfo', - 'Instance', - 'PersistenceConfig', - 'RescheduleMaintenanceRequest', - 'MaintenancePolicy', - 'WeeklyMaintenanceWindow', - 'MaintenanceSchedule', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'GetInstanceRequest', - 'GetInstanceAuthStringRequest', - 'InstanceAuthString', - 'CreateInstanceRequest', - 'UpdateInstanceRequest', - 'UpgradeInstanceRequest', - 'DeleteInstanceRequest', - 'GcsSource', - 'InputConfig', - 'ImportInstanceRequest', - 'GcsDestination', - 'OutputConfig', - 'ExportInstanceRequest', - 'FailoverInstanceRequest', - 'OperationMetadata', - 'LocationMetadata', - 'ZoneMetadata', - 'TlsCertificate', + "NodeInfo", + "Instance", + "PersistenceConfig", + "RescheduleMaintenanceRequest", + "MaintenancePolicy", + "WeeklyMaintenanceWindow", + "MaintenanceSchedule", + "ListInstancesRequest", + "ListInstancesResponse", + "GetInstanceRequest", + 
"GetInstanceAuthStringRequest", + "InstanceAuthString", + "CreateInstanceRequest", + "UpdateInstanceRequest", + "UpgradeInstanceRequest", + "DeleteInstanceRequest", + "GcsSource", + "InputConfig", + "ImportInstanceRequest", + "GcsDestination", + "OutputConfig", + "ExportInstanceRequest", + "FailoverInstanceRequest", + "OperationMetadata", + "LocationMetadata", + "ZoneMetadata", + "TlsCertificate", }, ) @@ -266,6 +264,7 @@ class Instance(proto.Message): Optional. The available maintenance versions that an instance could update to. """ + class State(proto.Enum): r"""Represents the different states of a Redis instance. @@ -297,6 +296,7 @@ class State(proto.Enum): Redis instance is failing over (availability may be affected). """ + STATE_UNSPECIFIED = 0 CREATING = 1 READY = 2 @@ -318,6 +318,7 @@ class Tier(proto.Enum): STANDARD_HA (3): STANDARD_HA tier: highly available primary/replica instances """ + TIER_UNSPECIFIED = 0 BASIC = 1 STANDARD_HA = 3 @@ -337,6 +338,7 @@ class ConnectMode(proto.Enum): access provides an IP address range for multiple Google Cloud services, including Memorystore. """ + CONNECT_MODE_UNSPECIFIED = 0 DIRECT_PEERING = 1 PRIVATE_SERVICE_ACCESS = 2 @@ -353,6 +355,7 @@ class TransitEncryptionMode(proto.Enum): DISABLED (2): TLS is disabled for the instance. """ + TRANSIT_ENCRYPTION_MODE_UNSPECIFIED = 0 SERVER_AUTHENTICATION = 1 DISABLED = 2 @@ -373,6 +376,7 @@ class ReadReplicasMode(proto.Enum): and the instance can scale up and down the number of replicas. Not valid for basic tier. """ + READ_REPLICAS_MODE_UNSPECIFIED = 0 READ_REPLICAS_DISABLED = 1 READ_REPLICAS_ENABLED = 2 @@ -388,6 +392,7 @@ class SuspensionReason(proto.Enum): Something wrong with the CMEK key provided by customer. 
""" + SUSPENSION_REASON_UNSPECIFIED = 0 CUSTOMER_MANAGED_KEY_ISSUE = 1 @@ -481,34 +486,34 @@ class SuspensionReason(proto.Enum): proto.BOOL, number=23, ) - server_ca_certs: MutableSequence['TlsCertificate'] = proto.RepeatedField( + server_ca_certs: MutableSequence["TlsCertificate"] = proto.RepeatedField( proto.MESSAGE, number=25, - message='TlsCertificate', + message="TlsCertificate", ) transit_encryption_mode: TransitEncryptionMode = proto.Field( proto.ENUM, number=26, enum=TransitEncryptionMode, ) - maintenance_policy: 'MaintenancePolicy' = proto.Field( + maintenance_policy: "MaintenancePolicy" = proto.Field( proto.MESSAGE, number=27, - message='MaintenancePolicy', + message="MaintenancePolicy", ) - maintenance_schedule: 'MaintenanceSchedule' = proto.Field( + maintenance_schedule: "MaintenanceSchedule" = proto.Field( proto.MESSAGE, number=28, - message='MaintenanceSchedule', + message="MaintenanceSchedule", ) replica_count: int = proto.Field( proto.INT32, number=31, ) - nodes: MutableSequence['NodeInfo'] = proto.RepeatedField( + nodes: MutableSequence["NodeInfo"] = proto.RepeatedField( proto.MESSAGE, number=32, - message='NodeInfo', + message="NodeInfo", ) read_endpoint: str = proto.Field( proto.STRING, @@ -527,10 +532,10 @@ class SuspensionReason(proto.Enum): proto.STRING, number=36, ) - persistence_config: 'PersistenceConfig' = proto.Field( + persistence_config: "PersistenceConfig" = proto.Field( proto.MESSAGE, number=37, - message='PersistenceConfig', + message="PersistenceConfig", ) suspension_reasons: MutableSequence[SuspensionReason] = proto.RepeatedField( proto.ENUM, @@ -572,6 +577,7 @@ class PersistenceConfig(proto.Message): future snapshots will be aligned. If not provided, the current time will be used. """ + class PersistenceMode(proto.Enum): r"""Available Persistence modes. @@ -584,6 +590,7 @@ class PersistenceMode(proto.Enum): RDB (2): RDB based Persistence is enabled. 
""" + PERSISTENCE_MODE_UNSPECIFIED = 0 DISABLED = 1 RDB = 2 @@ -603,6 +610,7 @@ class SnapshotPeriod(proto.Enum): TWENTY_FOUR_HOURS (6): Snapshot every 24 hours. """ + SNAPSHOT_PERIOD_UNSPECIFIED = 0 ONE_HOUR = 3 SIX_HOURS = 4 @@ -648,6 +656,7 @@ class RescheduleMaintenanceRequest(proto.Message): rescheduled to if reschedule_type=SPECIFIC_TIME, in RFC 3339 format, for example ``2012-11-15T16:19:00.094Z``. """ + class RescheduleType(proto.Enum): r"""Reschedule options. @@ -665,6 +674,7 @@ class RescheduleType(proto.Enum): If the user wants to reschedule the maintenance to a specific time. """ + RESCHEDULE_TYPE_UNSPECIFIED = 0 IMMEDIATE = 1 NEXT_AVAILABLE_WINDOW = 2 @@ -720,10 +730,12 @@ class MaintenancePolicy(proto.Message): proto.STRING, number=3, ) - weekly_maintenance_window: MutableSequence['WeeklyMaintenanceWindow'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='WeeklyMaintenanceWindow', + weekly_maintenance_window: MutableSequence["WeeklyMaintenanceWindow"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=4, + message="WeeklyMaintenanceWindow", + ) ) @@ -869,10 +881,10 @@ class ListInstancesResponse(proto.Message): def raw_page(self): return self - instances: MutableSequence['Instance'] = proto.RepeatedField( + instances: MutableSequence["Instance"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='Instance', + message="Instance", ) next_page_token: str = proto.Field( proto.STRING, @@ -962,10 +974,10 @@ class CreateInstanceRequest(proto.Message): proto.STRING, number=2, ) - instance: 'Instance' = proto.Field( + instance: "Instance" = proto.Field( proto.MESSAGE, number=3, - message='Instance', + message="Instance", ) @@ -995,10 +1007,10 @@ class UpdateInstanceRequest(proto.Message): number=1, message=field_mask_pb2.FieldMask, ) - instance: 'Instance' = proto.Field( + instance: "Instance" = proto.Field( proto.MESSAGE, number=2, - message='Instance', + message="Instance", ) @@ -1071,11 +1083,11 @@ class InputConfig(proto.Message): 
This field is a member of `oneof`_ ``source``. """ - gcs_source: 'GcsSource' = proto.Field( + gcs_source: "GcsSource" = proto.Field( proto.MESSAGE, number=1, - oneof='source', - message='GcsSource', + oneof="source", + message="GcsSource", ) @@ -1096,10 +1108,10 @@ class ImportInstanceRequest(proto.Message): proto.STRING, number=1, ) - input_config: 'InputConfig' = proto.Field( + input_config: "InputConfig" = proto.Field( proto.MESSAGE, number=3, - message='InputConfig', + message="InputConfig", ) @@ -1132,11 +1144,11 @@ class OutputConfig(proto.Message): This field is a member of `oneof`_ ``destination``. """ - gcs_destination: 'GcsDestination' = proto.Field( + gcs_destination: "GcsDestination" = proto.Field( proto.MESSAGE, number=1, - oneof='destination', - message='GcsDestination', + oneof="destination", + message="GcsDestination", ) @@ -1157,10 +1169,10 @@ class ExportInstanceRequest(proto.Message): proto.STRING, number=1, ) - output_config: 'OutputConfig' = proto.Field( + output_config: "OutputConfig" = proto.Field( proto.MESSAGE, number=3, - message='OutputConfig', + message="OutputConfig", ) @@ -1178,6 +1190,7 @@ class FailoverInstanceRequest(proto.Message): choose. If it's unspecified, data protection mode will be LIMITED_DATA_LOSS by default. """ + class DataProtectionMode(proto.Enum): r"""Specifies different modes of operation in relation to the data retention. @@ -1196,6 +1209,7 @@ class DataProtectionMode(proto.Enum): Instance failover will be performed without data loss control. """ + DATA_PROTECTION_MODE_UNSPECIFIED = 0 LIMITED_DATA_LOSS = 1 FORCE_DATA_LOSS = 2 @@ -1279,11 +1293,11 @@ class LocationMetadata(proto.Message): instance. 
""" - available_zones: MutableMapping[str, 'ZoneMetadata'] = proto.MapField( + available_zones: MutableMapping[str, "ZoneMetadata"] = proto.MapField( proto.STRING, proto.MESSAGE, number=1, - message='ZoneMetadata', + message="ZoneMetadata", ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index abaab5a4121d..3b773ab8cbd9 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -17,9 +17,8 @@ import pathlib import re import shutil - -from typing import Dict, List import warnings +from typing import Dict, List import nox @@ -154,7 +153,8 @@ def lint(session): # 2. Check formatting session.run( - "ruff", "format", + "ruff", + "format", "--check", f"--target-version=py{ALL_PYTHON[0].replace('.', '')}", "--line-length=88", @@ -167,12 +167,15 @@ def lint(session): @nox.session(python=DEFAULT_PYTHON_VERSION) def blacken(session): """(Deprecated) Legacy session. Please use 'nox -s format'.""" - session.log("WARNING: The 'blacken' session is deprecated and will be removed in a future release. Please use 'nox -s format' in the future.") + session.log( + "WARNING: The 'blacken' session is deprecated and will be removed in a future release. Please use 'nox -s format' in the future." 
+ ) # Just run the ruff formatter (keeping legacy behavior of only formatting, not sorting imports) session.install(RUFF_VERSION) session.run( - "ruff", "format", + "ruff", + "format", f"--target-version=py{ALL_PYTHON[0].replace('.', '')}", "--line-length=88", *LINT_PATHS, @@ -191,8 +194,10 @@ def format(session): # check --select I: Enables strict import sorting # --fix: Applies the changes automatically session.run( - "ruff", "check", - "--select", "I", + "ruff", + "check", + "--select", + "I", "--fix", f"--target-version=py{ALL_PYTHON[0].replace('.', '')}", "--line-length=88", # Standard Black line length @@ -201,7 +206,8 @@ def format(session): # 3. Run Ruff to format code session.run( - "ruff", "format", + "ruff", + "format", f"--target-version=py{ALL_PYTHON[0].replace('.', '')}", "--line-length=88", # Standard Black line length *LINT_PATHS, @@ -386,8 +392,10 @@ def docs(session): "sphinx-build", "-T", # show full traceback on exception "-N", # no colors - "-b", "html", # builder - "-d", os.path.join("docs", "_build", "doctrees", ""), # cache directory + "-b", + "html", # builder + "-d", + os.path.join("docs", "_build", "doctrees", ""), # cache directory # paths to build: os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py index 6cb8469248af..4c2cef2c45cf 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py @@ -60,4 +60,5 @@ async def sample_create_instance(): # Handle the response print(response) + # [END 
redis_v1_generated_CloudRedis_CreateInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py index ea546b2795ac..d52c5aa49358 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py @@ -60,4 +60,5 @@ def sample_create_instance(): # Handle the response print(response) + # [END redis_v1_generated_CloudRedis_CreateInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py index 45dcc1896a8d..75706750f56d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py @@ -53,4 +53,5 @@ async def sample_delete_instance(): # Handle the response print(response) + # [END redis_v1_generated_CloudRedis_DeleteInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py index 610246d96510..6aafcb787ba5 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py @@ -53,4 +53,5 @@ def sample_delete_instance(): # Handle the response print(response) + # [END redis_v1_generated_CloudRedis_DeleteInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py index dff957f67c40..938b0276a16d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py @@ -57,4 +57,5 @@ async def sample_export_instance(): # Handle the response print(response) + # [END redis_v1_generated_CloudRedis_ExportInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_sync.py index b6abe0bd10d0..5d89e2add938 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_sync.py @@ -57,4 +57,5 @@ def sample_export_instance(): # Handle the response print(response) + # [END redis_v1_generated_CloudRedis_ExportInstance_sync] diff --git 
a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py index 85e18c6dff57..6588f7303d20 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py @@ -53,4 +53,5 @@ async def sample_failover_instance(): # Handle the response print(response) + # [END redis_v1_generated_CloudRedis_FailoverInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_sync.py index 34c7d735f872..0093e938d853 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_sync.py @@ -53,4 +53,5 @@ def sample_failover_instance(): # Handle the response print(response) + # [END redis_v1_generated_CloudRedis_FailoverInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py index 241282db64fb..5b3e2c759b9f 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py @@ -49,4 +49,5 @@ async def sample_get_instance(): # Handle the response print(response) + # [END redis_v1_generated_CloudRedis_GetInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_async.py index 832355185758..ebbe7b4ceffb 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_async.py @@ -49,4 +49,5 @@ async def sample_get_instance_auth_string(): # Handle the response print(response) + # [END redis_v1_generated_CloudRedis_GetInstanceAuthString_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_sync.py index 0f8c811919e9..8799cbe046a4 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_sync.py @@ -49,4 +49,5 @@ def sample_get_instance_auth_string(): # Handle the response 
print(response) + # [END redis_v1_generated_CloudRedis_GetInstanceAuthString_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py index 5de5dd3bba9f..6c3054d1b6d8 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py @@ -49,4 +49,5 @@ def sample_get_instance(): # Handle the response print(response) + # [END redis_v1_generated_CloudRedis_GetInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py index 80679985001c..7ba230d11d39 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py @@ -57,4 +57,5 @@ async def sample_import_instance(): # Handle the response print(response) + # [END redis_v1_generated_CloudRedis_ImportInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_sync.py index 3a3f1fac1a51..67c9f79c2bc6 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_sync.py @@ -57,4 +57,5 @@ def sample_import_instance(): # Handle the response print(response) + # [END redis_v1_generated_CloudRedis_ImportInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py index 67c9a3c86e46..0e2bb45a0bf9 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py @@ -50,4 +50,5 @@ async def sample_list_instances(): async for response in page_result: print(response) + # [END redis_v1_generated_CloudRedis_ListInstances_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py index 2922fb554895..99b1b531b505 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py @@ -50,4 +50,5 @@ def sample_list_instances(): for response in page_result: print(response) + # [END redis_v1_generated_CloudRedis_ListInstances_sync] diff --git 
a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_async.py index fd314bcf69f3..226651695385 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_async.py @@ -54,4 +54,5 @@ async def sample_reschedule_maintenance(): # Handle the response print(response) + # [END redis_v1_generated_CloudRedis_RescheduleMaintenance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_sync.py index ac2089381ae2..e8a55d5f05df 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_sync.py @@ -54,4 +54,5 @@ def sample_reschedule_maintenance(): # Handle the response print(response) + # [END redis_v1_generated_CloudRedis_RescheduleMaintenance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py index 4243eca30122..4d4c30233a22 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py @@ -58,4 +58,5 @@ async def sample_update_instance(): # Handle the response print(response) + # [END redis_v1_generated_CloudRedis_UpdateInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py index b5ae40fbab41..28d56e4bb691 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py @@ -58,4 +58,5 @@ def sample_update_instance(): # Handle the response print(response) + # [END redis_v1_generated_CloudRedis_UpdateInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py index 43cba54d61af..06d1d04359d5 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py @@ -54,4 +54,5 @@ async def sample_upgrade_instance(): # Handle the response print(response) + # [END redis_v1_generated_CloudRedis_UpgradeInstance_async] diff --git 
a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_sync.py index 89cdfffef676..af3db7c451e1 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_sync.py @@ -54,4 +54,5 @@ def sample_upgrade_instance(): # Handle the response print(response) + # [END redis_v1_generated_CloudRedis_UpgradeInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index e121207415e7..1ecce6251b63 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -17,20 +17,20 @@ import os import re -import setuptools # type: ignore +import setuptools # type: ignore package_root = os.path.abspath(os.path.dirname(__file__)) -name = 'google-cloud-redis' +name = "google-cloud-redis" description = "Google Cloud Redis API client library" version = None -with open(os.path.join(package_root, 'google/cloud/redis/gapic_version.py')) as fp: +with open(os.path.join(package_root, "google/cloud/redis/gapic_version.py")) as fp: version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) + assert len(version_candidates) == 1 version = version_candidates[0] if version[0] == "0": @@ -52,7 +52,7 @@ extras = { "async_rest": [ "google-api-core[grpc] >= 2.21.0, < 3.0.0", - "google-auth[aiohttp] >= 2.35.0, <3.0.0" + "google-auth[aiohttp] >= 2.35.0, <3.0.0", ], } url = 
"https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis" diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/__init__.py index 191773d5572d..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/__init__.py index 191773d5572d..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/__init__.py index 191773d5572d..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/__init__.py index 191773d5572d..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff 
--git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 035e18dcd386..5a14e2cfae83 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -14,6 +14,7 @@ # limitations under the License. # import os + # try/except added for compatibility with python < 3.8 try: from unittest import mock @@ -21,55 +22,37 @@ except ImportError: # pragma: NO COVER import mock -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format import json import math +from collections.abc import AsyncIterable, Iterable, Mapping, Sequence + +import grpc import pytest -from collections.abc import Sequence, Mapping from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule +from google.protobuf import json_format +from grpc.experimental import aio from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule + try: import aiohttp # type: ignore - from google.auth.aio.transport.sessions import AsyncAuthorizedSession from google.api_core.operations_v1 import AsyncOperationsRestClient + from google.auth.aio.transport.sessions import AsyncAuthorizedSession + HAS_ASYNC_REST_EXTRA = True -except ImportError: # pragma: NO COVER +except ImportError: # pragma: NO COVER HAS_ASYNC_REST_EXTRA = False -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session from google.protobuf import json_format +from requests import PreparedRequest, Request, Response +from requests.sessions import Session try: from google.auth.aio import credentials as 
ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER +except ImportError: # pragma: NO COVER HAS_GOOGLE_AUTH_AIO = False -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.location import locations_pb2 -from google.cloud.redis_v1.services.cloud_redis import CloudRedisAsyncClient -from google.cloud.redis_v1.services.cloud_redis import CloudRedisClient -from google.cloud.redis_v1.services.cloud_redis import pagers -from google.cloud.redis_v1.services.cloud_redis import transports -from google.cloud.redis_v1.types import cloud_redis -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account import google.api_core.operation_async as operation_async # type: ignore import google.auth import google.protobuf.duration_pb2 as duration_pb2 # type: ignore @@ -78,8 +61,30 @@ import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore import google.type.dayofweek_pb2 as dayofweek_pb2 # type: ignore import google.type.timeofday_pb2 as timeofday_pb2 # type: ignore - - +from google.api_core import ( + client_options, + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import 
locations_pb2 +from google.cloud.redis_v1.services.cloud_redis import ( + CloudRedisAsyncClient, + CloudRedisClient, + pagers, + transports, +) +from google.cloud.redis_v1.types import cloud_redis +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account CRED_INFO_JSON = { "credential_source": "/path/to/file", @@ -94,9 +99,11 @@ async def mock_async_gen(data, chunk_size=1): chunk = data[i : i + chunk_size] yield chunk.encode("utf-8") + def client_cert_source_callback(): return b"cert bytes", b"key bytes" + # TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. # See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. def async_anonymous_credentials(): @@ -104,17 +111,27 @@ def async_anonymous_credentials(): return ga_credentials_async.AnonymousCredentials() return ga_credentials.AnonymousCredentials() + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + # If default endpoint template is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint template so the client can produce a different # mtls endpoint for endpoint testing purposes. 
def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) def test__get_default_mtls_endpoint(): @@ -126,12 +143,26 @@ def test__get_default_mtls_endpoint(): custom_endpoint = ".custom" assert CloudRedisClient._get_default_mtls_endpoint(None) is None - assert CloudRedisClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert CloudRedisClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert CloudRedisClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert CloudRedisClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert ( + CloudRedisClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + CloudRedisClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + CloudRedisClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CloudRedisClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) assert CloudRedisClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - assert CloudRedisClient._get_default_mtls_endpoint(custom_endpoint) == custom_endpoint + assert ( + CloudRedisClient._get_default_mtls_endpoint(custom_endpoint) == custom_endpoint + ) + def test__read_environment_variables(): assert CloudRedisClient._read_environment_variables() == (False, "auto", None) @@ -154,10 +185,10 @@ def test__read_environment_variables(): ) else: assert CloudRedisClient._read_environment_variables() == ( - False, - "auto", - None, - ) + False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): assert 
CloudRedisClient._read_environment_variables() == (False, "never", None) @@ -171,10 +202,17 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: CloudRedisClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert CloudRedisClient._read_environment_variables() == (False, "auto", "foo.com") + assert CloudRedisClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) def test_use_client_cert_effective(): @@ -183,7 +221,9 @@ def test_use_client_cert_effective(): # the google-auth library supports automatic mTLS and determines that a # client certificate should be used. if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=True): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=True + ): assert CloudRedisClient._use_client_cert_effective() is True # Test case 2: Test when `should_use_client_cert` returns False. @@ -191,7 +231,9 @@ def test_use_client_cert_effective(): # the google-auth library supports automatic mTLS and determines that a # client certificate should NOT be used. 
if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=False): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=False + ): assert CloudRedisClient._use_client_cert_effective() is False # Test case 3: Test when `should_use_client_cert` is unavailable and the @@ -203,7 +245,9 @@ def test_use_client_cert_effective(): # Test case 4: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"} + ): assert CloudRedisClient._use_client_cert_effective() is False # Test case 5: Test when `should_use_client_cert` is unavailable and the @@ -215,7 +259,9 @@ def test_use_client_cert_effective(): # Test case 6: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"} + ): assert CloudRedisClient._use_client_cert_effective() is False # Test case 7: Test when `should_use_client_cert` is unavailable and the @@ -227,7 +273,9 @@ def test_use_client_cert_effective(): # Test case 8: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". 
if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"} + ): assert CloudRedisClient._use_client_cert_effective() is False # Test case 9: Test when `should_use_client_cert` is unavailable and the @@ -242,83 +290,167 @@ def test_use_client_cert_effective(): # The method should raise a ValueError as the environment variable must be either # "true" or "false". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): with pytest.raises(ValueError): CloudRedisClient._use_client_cert_effective() # Test case 11: Test when `should_use_client_cert` is available and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. # The method should return False as the environment variable is set to an invalid value. - if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): assert CloudRedisClient._use_client_cert_effective() is False # Test case 12: Test when `should_use_client_cert` is available and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also, # the GOOGLE_API_CONFIG environment variable is unset. 
- if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): assert CloudRedisClient._use_client_cert_effective() is False + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() assert CloudRedisClient._get_client_cert_source(None, False) is None - assert CloudRedisClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert CloudRedisClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + assert ( + CloudRedisClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + CloudRedisClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + CloudRedisClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + CloudRedisClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert CloudRedisClient._get_client_cert_source(None, True) is mock_default_cert_source - assert CloudRedisClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source -@mock.patch.object(CloudRedisClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisClient)) 
-@mock.patch.object(CloudRedisAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisAsyncClient)) +@mock.patch.object( + CloudRedisClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CloudRedisClient), +) +@mock.patch.object( + CloudRedisAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CloudRedisAsyncClient), +) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() default_universe = CloudRedisClient._DEFAULT_UNIVERSE - default_endpoint = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) - assert CloudRedisClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert CloudRedisClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == CloudRedisClient.DEFAULT_MTLS_ENDPOINT - assert CloudRedisClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert CloudRedisClient._get_api_endpoint(None, None, default_universe, "always") == CloudRedisClient.DEFAULT_MTLS_ENDPOINT - assert CloudRedisClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == CloudRedisClient.DEFAULT_MTLS_ENDPOINT - assert CloudRedisClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert CloudRedisClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + assert ( + CloudRedisClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + 
CloudRedisClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == CloudRedisClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + CloudRedisClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + CloudRedisClient._get_api_endpoint(None, None, default_universe, "always") + == CloudRedisClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + CloudRedisClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == CloudRedisClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + CloudRedisClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + CloudRedisClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) with pytest.raises(MutualTLSChannelError) as excinfo: - CloudRedisClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + CloudRedisClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) def test__get_universe_domain(): client_universe_domain = "foo.com" universe_domain_env = "bar.com" - assert CloudRedisClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert CloudRedisClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert CloudRedisClient._get_universe_domain(None, None) == CloudRedisClient._DEFAULT_UNIVERSE + assert ( + CloudRedisClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + CloudRedisClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + CloudRedisClient._get_universe_domain(None, None) + == CloudRedisClient._DEFAULT_UNIVERSE + ) with pytest.raises(ValueError) as excinfo: CloudRedisClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): cred = mock.Mock(["get_cred_info"]) cred.get_cred_info = mock.Mock(return_value=cred_info_json) @@ -334,7 +466,8 @@ def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_in else: assert error.details == ["foo"] -@pytest.mark.parametrize("error_code", [401,403,404,500]) + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) def 
test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): cred = mock.Mock([]) assert not hasattr(cred, "get_cred_info") @@ -347,14 +480,20 @@ def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): client._add_cred_info_for_auth_errors(error) assert error.details == [] -@pytest.mark.parametrize("client_class,transport_name", [ - (CloudRedisClient, "grpc"), - (CloudRedisAsyncClient, "grpc_asyncio"), - (CloudRedisClient, "rest"), -]) + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (CloudRedisClient, "grpc"), + (CloudRedisAsyncClient, "grpc_asyncio"), + (CloudRedisClient, "rest"), + ], +) def test_cloud_redis_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: factory.return_value = creds info = {"valid": True} client = client_class.from_service_account_info(info, transport=transport_name) @@ -362,52 +501,68 @@ def test_cloud_redis_client_from_service_account_info(client_class, transport_na assert isinstance(client, client_class) assert client.transport._host == ( - 'redis.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://redis.googleapis.com' + "redis.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://redis.googleapis.com" ) -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.CloudRedisGrpcTransport, "grpc"), - (transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.CloudRedisRestTransport, "rest"), -]) -def test_cloud_redis_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: +@pytest.mark.parametrize( + "transport_class,transport_name", + 
[ + (transports.CloudRedisGrpcTransport, "grpc"), + (transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.CloudRedisRestTransport, "rest"), + ], +) +def test_cloud_redis_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=False) use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class,transport_name", [ - (CloudRedisClient, "grpc"), - (CloudRedisAsyncClient, "grpc_asyncio"), - (CloudRedisClient, "rest"), -]) +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (CloudRedisClient, "grpc"), + (CloudRedisAsyncClient, "grpc_asyncio"), + (CloudRedisClient, "rest"), + ], +) def test_cloud_redis_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json", 
transport=transport_name) + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) assert client.transport._host == ( - 'redis.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://redis.googleapis.com' + "redis.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://redis.googleapis.com" ) @@ -423,30 +578,45 @@ def test_cloud_redis_client_get_transport_class(): assert transport == transports.CloudRedisGrpcTransport -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc"), - (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio"), - (CloudRedisClient, transports.CloudRedisRestTransport, "rest"), -]) -@mock.patch.object(CloudRedisClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisClient)) -@mock.patch.object(CloudRedisAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisAsyncClient)) -def test_cloud_redis_client_client_options(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc"), + ( + CloudRedisAsyncClient, + transports.CloudRedisGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (CloudRedisClient, transports.CloudRedisRestTransport, "rest"), + ], +) +@mock.patch.object( + CloudRedisClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CloudRedisClient), +) +@mock.patch.object( + CloudRedisAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CloudRedisAsyncClient), +) +def test_cloud_redis_client_client_options( + client_class, transport_class, transport_name +): # Check that if channel is provided we won't create a new 
one. - with mock.patch.object(CloudRedisClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) + with mock.patch.object(CloudRedisClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. - with mock.patch.object(CloudRedisClient, 'get_transport_class') as gtc: + with mock.patch.object(CloudRedisClient, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() # Check the case api_endpoint is provided. options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( @@ -464,13 +634,15 @@ def test_cloud_redis_client_client_options(client_class, transport_class, transp # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -482,7 +654,7 @@ def test_cloud_redis_client_client_options(client_class, transport_class, transp # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( @@ -502,17 +674,22 @@ def test_cloud_redis_client_client_options(client_class, transport_class, transp with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, 
transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -521,48 +698,82 @@ def test_cloud_redis_client_client_options(client_class, transport_class, transp api_audience=None, ) # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", "true"), - (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", "false"), - (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (CloudRedisClient, transports.CloudRedisRestTransport, "rest", "true"), - (CloudRedisClient, transports.CloudRedisRestTransport, "rest", "false"), -]) 
-@mock.patch.object(CloudRedisClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisClient)) -@mock.patch.object(CloudRedisAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisAsyncClient)) + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", "true"), + ( + CloudRedisAsyncClient, + transports.CloudRedisGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", "false"), + ( + CloudRedisAsyncClient, + transports.CloudRedisGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (CloudRedisClient, transports.CloudRedisRestTransport, "rest", "true"), + (CloudRedisClient, transports.CloudRedisRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + CloudRedisClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CloudRedisClient), +) +@mock.patch.object( + CloudRedisAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CloudRedisAsyncClient), +) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_cloud_redis_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): +def test_cloud_redis_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. # Check the case client_cert_source is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -581,12 +792,22 @@ def test_cloud_redis_client_mtls_env_auto(client_class, transport_class, transpo # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -607,15 +828,22 @@ def test_cloud_redis_client_mtls_env_auto(client_class, transport_class, transpo ) # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -625,19 +853,27 @@ def test_cloud_redis_client_mtls_env_auto(client_class, transport_class, transpo ) -@pytest.mark.parametrize("client_class", [ - CloudRedisClient, CloudRedisAsyncClient -]) -@mock.patch.object(CloudRedisClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisClient)) -@mock.patch.object(CloudRedisAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisAsyncClient)) +@pytest.mark.parametrize("client_class", [CloudRedisClient, CloudRedisAsyncClient]) +@mock.patch.object( + CloudRedisClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisClient) +) +@mock.patch.object( + CloudRedisAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudRedisAsyncClient), +) def test_cloud_redis_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source == mock_client_cert_source @@ -645,18 +881,25 @@ def test_cloud_redis_client_get_mtls_endpoint_and_cert_source(client_class): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source is None # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): if hasattr(google.auth.transport.mtls, "should_use_client_cert"): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, ) api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( options @@ -693,23 +936,23 @@ def test_cloud_redis_client_get_mtls_endpoint_and_cert_source(client_class): env = os.environ.copy() env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) with mock.patch.dict(os.environ, env, clear=True): - config_filename = "mock_certificate_config.json" - config_file_content = json.dumps(config_data) - m = mock.mock_open(read_data=config_file_content) - with mock.patch("builtins.open", m): - with mock.patch.dict( - os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} - ): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, - api_endpoint=mock_api_endpoint, - ) - api_endpoint, cert_source = ( - client_class.get_mtls_endpoint_and_cert_source(options) - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is expected_cert_source + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint 
== mock_api_endpoint + assert cert_source is expected_cert_source # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). test_cases = [ @@ -740,23 +983,23 @@ def test_cloud_redis_client_get_mtls_endpoint_and_cert_source(client_class): env = os.environ.copy() env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") with mock.patch.dict(os.environ, env, clear=True): - config_filename = "mock_certificate_config.json" - config_file_content = json.dumps(config_data) - m = mock.mock_open(read_data=config_file_content) - with mock.patch("builtins.open", m): - with mock.patch.dict( - os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} - ): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, - api_endpoint=mock_api_endpoint, - ) - api_endpoint, cert_source = ( - client_class.get_mtls_endpoint_and_cert_source(options) - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is expected_cert_source + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): @@ -772,16 +1015,27 @@ def test_cloud_redis_client_get_mtls_endpoint_and_cert_source(client_class): # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_ENDPOINT assert cert_source is None # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source() + ) assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source @@ -791,27 +1045,48 @@ def test_cloud_redis_client_get_mtls_endpoint_and_cert_source(client_class): with pytest.raises(MutualTLSChannelError) as excinfo: client_class.get_mtls_endpoint_and_cert_source() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + -@pytest.mark.parametrize("client_class", [ - CloudRedisClient, CloudRedisAsyncClient -]) -@mock.patch.object(CloudRedisClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisClient)) 
-@mock.patch.object(CloudRedisAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisAsyncClient)) +@pytest.mark.parametrize("client_class", [CloudRedisClient, CloudRedisAsyncClient]) +@mock.patch.object( + CloudRedisClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CloudRedisClient), +) +@mock.patch.object( + CloudRedisAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CloudRedisAsyncClient), +) def test_cloud_redis_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" default_universe = CloudRedisClient._DEFAULT_UNIVERSE - default_endpoint = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", @@ -834,11 +1109,19 @@ def test_cloud_redis_client_client_api_endpoint(client_class): universe_exists = hasattr(options, "universe_domain") if universe_exists: options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the _DEFAULT_ENDPOINT_TEMPLATE 
populated with GDU as the api endpoint. @@ -846,27 +1129,40 @@ def test_cloud_redis_client_client_api_endpoint(client_class): if hasattr(options, "universe_domain"): delattr(options, "universe_domain") with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) assert client.api_endpoint == default_endpoint -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc"), - (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio"), - (CloudRedisClient, transports.CloudRedisRestTransport, "rest"), -]) -def test_cloud_redis_client_client_options_scopes(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc"), + ( + CloudRedisAsyncClient, + transports.CloudRedisGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (CloudRedisClient, transports.CloudRedisRestTransport, "rest"), + ], +) +def test_cloud_redis_client_client_options_scopes( + client_class, transport_class, transport_name +): # Check the case scopes are provided. 
options = client_options.ClientOptions( scopes=["1", "2"], ) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -875,24 +1171,35 @@ def test_cloud_redis_client_client_options_scopes(client_class, transport_class, api_audience=None, ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", grpc_helpers), - (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (CloudRedisClient, transports.CloudRedisRestTransport, "rest", None), -]) -def test_cloud_redis_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", grpc_helpers), + ( + CloudRedisAsyncClient, + transports.CloudRedisGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (CloudRedisClient, transports.CloudRedisRestTransport, "rest", None), + ], +) +def test_cloud_redis_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -901,12 +1208,13 @@ def test_cloud_redis_client_client_options_credentials_file(client_class, transp api_audience=None, ) + def test_cloud_redis_client_client_options_from_dict(): - with mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisGrpcTransport.__init__') as grpc_transport: + with mock.patch( + "google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisGrpcTransport.__init__" + ) as grpc_transport: grpc_transport.return_value = None - client = CloudRedisClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) + client = CloudRedisClient(client_options={"api_endpoint": "squid.clam.whelk"}) grpc_transport.assert_called_once_with( credentials=None, credentials_file=None, @@ -920,23 +1228,33 @@ def test_cloud_redis_client_client_options_from_dict(): ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", grpc_helpers), - (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_cloud_redis_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): +@pytest.mark.parametrize( + 
"client_class,transport_class,transport_name,grpc_helpers", + [ + (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", grpc_helpers), + ( + CloudRedisAsyncClient, + transports.CloudRedisGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_cloud_redis_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -946,13 +1264,13 @@ def test_cloud_redis_client_create_channel_credentials_file(client_class, transp ) # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch.object(grpc_helpers, "create_channel") as create_channel, + ): creds = ga_credentials.AnonymousCredentials() file_creds = ga_credentials.AnonymousCredentials() load_creds.return_value = (file_creds, None) @@ -963,9 +1281,7 @@ def test_cloud_redis_client_create_channel_credentials_file(client_class, transp credentials=file_creds, credentials_file=None, quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), scopes=None, default_host="redis.googleapis.com", ssl_credentials=None, @@ -976,11 +1292,14 @@ def test_cloud_redis_client_create_channel_credentials_file(client_class, transp ) -@pytest.mark.parametrize("request_type", [ - cloud_redis.ListInstancesRequest, - dict, -]) -def test_list_instances(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.ListInstancesRequest, + dict, + ], +) +def test_list_instances(request_type, transport: str = "grpc"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -991,13 +1310,11 @@ def test_list_instances(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = cloud_redis.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) response = client.list_instances(request) @@ -1009,8 +1326,8 @@ def test_list_instances(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] def test_list_instances_non_empty_request_with_auto_populated_field(): @@ -1018,30 +1335,31 @@ def test_list_instances_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = cloud_redis.ListInstancesRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_instances(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.ListInstancesRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) + def test_list_instances_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1060,7 +1378,9 @@ def test_list_instances_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc request = {} client.list_instances(request) @@ -1074,8 +1394,11 @@ def test_list_instances_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_instances_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_instances_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1089,12 +1412,17 @@ async def test_list_instances_async_use_cached_wrapped_rpc(transport: str = "grp wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_instances in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_instances + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - 
client._client._transport._wrapped_methods[client._client._transport.list_instances] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_instances + ] = mock_rpc request = {} await client.list_instances(request) @@ -1108,8 +1436,11 @@ async def test_list_instances_async_use_cached_wrapped_rpc(transport: str = "grp assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_instances_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.ListInstancesRequest): +async def test_list_instances_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.ListInstancesRequest +): client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -1120,14 +1451,14 @@ async def test_list_instances_async(transport: str = 'grpc_asyncio', request_typ request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_redis.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) response = await client.list_instances(request) # Establish that the underlying gRPC stub method was called. @@ -1138,14 +1469,15 @@ async def test_list_instances_async(transport: str = 'grpc_asyncio', request_typ # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListInstancesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio async def test_list_instances_async_from_dict(): await test_list_instances_async(request_type=dict) + def test_list_instances_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1155,12 +1487,10 @@ def test_list_instances_field_headers(): # a field header. Set these to a non-empty value. request = cloud_redis.ListInstancesRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: call.return_value = cloud_redis.ListInstancesResponse() client.list_instances(request) @@ -1172,9 +1502,9 @@ def test_list_instances_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1187,13 +1517,13 @@ async def test_list_instances_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_redis.ListInstancesRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.ListInstancesResponse()) + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_redis.ListInstancesResponse() + ) await client.list_instances(request) # Establish that the underlying gRPC stub method was called. @@ -1204,9 +1534,9 @@ async def test_list_instances_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_list_instances_flattened(): @@ -1215,15 +1545,13 @@ def test_list_instances_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_redis.ListInstancesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.list_instances( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1231,7 +1559,7 @@ def test_list_instances_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -1245,9 +1573,10 @@ def test_list_instances_flattened_error(): with pytest.raises(ValueError): client.list_instances( cloud_redis.ListInstancesRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test_list_instances_flattened_async(): client = CloudRedisAsyncClient( @@ -1255,17 +1584,17 @@ async def test_list_instances_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_redis.ListInstancesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.ListInstancesResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_redis.ListInstancesResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.list_instances( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1273,9 +1602,10 @@ async def test_list_instances_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test_list_instances_flattened_error_async(): client = CloudRedisAsyncClient( @@ -1287,7 +1617,7 @@ async def test_list_instances_flattened_error_async(): with pytest.raises(ValueError): await client.list_instances( cloud_redis.ListInstancesRequest(), - parent='parent_value', + parent="parent_value", ) @@ -1298,9 +1628,7 @@ def test_list_instances_pager(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( cloud_redis.ListInstancesResponse( @@ -1309,17 +1637,17 @@ def test_list_instances_pager(transport_name: str = "grpc"): cloud_redis.Instance(), cloud_redis.Instance(), ], - next_page_token='abc', + next_page_token="abc", ), cloud_redis.ListInstancesResponse( instances=[], - next_page_token='def', + next_page_token="def", ), cloud_redis.ListInstancesResponse( instances=[ cloud_redis.Instance(), ], - next_page_token='ghi', + next_page_token="ghi", ), cloud_redis.ListInstancesResponse( instances=[ @@ -1334,9 +1662,7 @@ def test_list_instances_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_instances(request={}, retry=retry, timeout=timeout) @@ -1346,8 +1672,9 @@ def test_list_instances_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, cloud_redis.Instance) - for i in results) + assert all(isinstance(i, cloud_redis.Instance) for i in results) + + def test_list_instances_pages(transport_name: str = "grpc"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1355,9 +1682,7 @@ def test_list_instances_pages(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( cloud_redis.ListInstancesResponse( @@ -1366,17 +1691,17 @@ def test_list_instances_pages(transport_name: str = "grpc"): cloud_redis.Instance(), cloud_redis.Instance(), ], - next_page_token='abc', + next_page_token="abc", ), cloud_redis.ListInstancesResponse( instances=[], - next_page_token='def', + next_page_token="def", ), cloud_redis.ListInstancesResponse( instances=[ cloud_redis.Instance(), ], - next_page_token='ghi', + next_page_token="ghi", ), cloud_redis.ListInstancesResponse( instances=[ @@ -1387,9 +1712,10 @@ def test_list_instances_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.list_instances(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_list_instances_async_pager(): client = CloudRedisAsyncClient( @@ -1398,8 +1724,8 @@ async def test_list_instances_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_instances), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( cloud_redis.ListInstancesResponse( @@ -1408,17 +1734,17 @@ async def test_list_instances_async_pager(): cloud_redis.Instance(), cloud_redis.Instance(), ], - next_page_token='abc', + next_page_token="abc", ), cloud_redis.ListInstancesResponse( instances=[], - next_page_token='def', + next_page_token="def", ), cloud_redis.ListInstancesResponse( instances=[ cloud_redis.Instance(), ], - next_page_token='ghi', + next_page_token="ghi", ), cloud_redis.ListInstancesResponse( instances=[ @@ -1428,15 +1754,16 @@ async def test_list_instances_async_pager(): ), RuntimeError, ) - async_pager = await client.list_instances(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.list_instances( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, cloud_redis.Instance) - for i in responses) + assert all(isinstance(i, cloud_redis.Instance) for i in responses) @pytest.mark.asyncio @@ -1447,8 +1774,8 @@ async def test_list_instances_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_instances), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( cloud_redis.ListInstancesResponse( @@ -1457,17 +1784,17 @@ async def test_list_instances_async_pages(): cloud_redis.Instance(), cloud_redis.Instance(), ], - next_page_token='abc', + next_page_token="abc", ), cloud_redis.ListInstancesResponse( instances=[], - next_page_token='def', + next_page_token="def", ), cloud_redis.ListInstancesResponse( instances=[ cloud_redis.Instance(), ], - next_page_token='ghi', + next_page_token="ghi", ), cloud_redis.ListInstancesResponse( instances=[ @@ -1480,18 +1807,22 @@ async def test_list_instances_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.list_instances(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - cloud_redis.GetInstanceRequest, - dict, -]) -def test_get_instance(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.GetInstanceRequest, + dict, + ], +) +def test_get_instance(request_type, transport: str = "grpc"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1502,38 +1833,38 @@ def test_get_instance(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = cloud_redis.Instance( - name='name_value', - display_name='display_name_value', - location_id='location_id_value', - alternative_location_id='alternative_location_id_value', - redis_version='redis_version_value', - reserved_ip_range='reserved_ip_range_value', - secondary_ip_range='secondary_ip_range_value', - host='host_value', + name="name_value", + display_name="display_name_value", + location_id="location_id_value", + alternative_location_id="alternative_location_id_value", + redis_version="redis_version_value", + reserved_ip_range="reserved_ip_range_value", + secondary_ip_range="secondary_ip_range_value", + host="host_value", port=453, - current_location_id='current_location_id_value', + current_location_id="current_location_id_value", state=cloud_redis.Instance.State.CREATING, - status_message='status_message_value', + status_message="status_message_value", tier=cloud_redis.Instance.Tier.BASIC, memory_size_gb=1499, - authorized_network='authorized_network_value', - persistence_iam_identity='persistence_iam_identity_value', + authorized_network="authorized_network_value", + persistence_iam_identity="persistence_iam_identity_value", connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, auth_enabled=True, transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, replica_count=1384, - read_endpoint='read_endpoint_value', + read_endpoint="read_endpoint_value", read_endpoint_port=1920, read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, - customer_managed_key='customer_managed_key_value', - suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], - maintenance_version='maintenance_version_value', - available_maintenance_versions=['available_maintenance_versions_value'], + customer_managed_key="customer_managed_key_value", + suspension_reasons=[ + cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE + ], + 
maintenance_version="maintenance_version_value", + available_maintenance_versions=["available_maintenance_versions_value"], ) response = client.get_instance(request) @@ -1545,33 +1876,43 @@ def test_get_instance(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, cloud_redis.Instance) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.location_id == 'location_id_value' - assert response.alternative_location_id == 'alternative_location_id_value' - assert response.redis_version == 'redis_version_value' - assert response.reserved_ip_range == 'reserved_ip_range_value' - assert response.secondary_ip_range == 'secondary_ip_range_value' - assert response.host == 'host_value' + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.location_id == "location_id_value" + assert response.alternative_location_id == "alternative_location_id_value" + assert response.redis_version == "redis_version_value" + assert response.reserved_ip_range == "reserved_ip_range_value" + assert response.secondary_ip_range == "secondary_ip_range_value" + assert response.host == "host_value" assert response.port == 453 - assert response.current_location_id == 'current_location_id_value' + assert response.current_location_id == "current_location_id_value" assert response.state == cloud_redis.Instance.State.CREATING - assert response.status_message == 'status_message_value' + assert response.status_message == "status_message_value" assert response.tier == cloud_redis.Instance.Tier.BASIC assert response.memory_size_gb == 1499 - assert response.authorized_network == 'authorized_network_value' - assert response.persistence_iam_identity == 'persistence_iam_identity_value' + assert response.authorized_network == "authorized_network_value" + assert response.persistence_iam_identity == "persistence_iam_identity_value" 
assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING assert response.auth_enabled is True - assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + assert ( + response.transit_encryption_mode + == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + ) assert response.replica_count == 1384 - assert response.read_endpoint == 'read_endpoint_value' + assert response.read_endpoint == "read_endpoint_value" assert response.read_endpoint_port == 1920 - assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED - assert response.customer_managed_key == 'customer_managed_key_value' - assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE] - assert response.maintenance_version == 'maintenance_version_value' - assert response.available_maintenance_versions == ['available_maintenance_versions_value'] + assert ( + response.read_replicas_mode + == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED + ) + assert response.customer_managed_key == "customer_managed_key_value" + assert response.suspension_reasons == [ + cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE + ] + assert response.maintenance_version == "maintenance_version_value" + assert response.available_maintenance_versions == [ + "available_maintenance_versions_value" + ] def test_get_instance_non_empty_request_with_auto_populated_field(): @@ -1579,28 +1920,29 @@ def test_get_instance_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
request = cloud_redis.GetInstanceRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_instance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.GetInstanceRequest( - name='name_value', + name="name_value", ) + def test_get_instance_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1619,7 +1961,9 @@ def test_get_instance_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc request = {} client.get_instance(request) @@ -1633,8 +1977,11 @@ def test_get_instance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_get_instance_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1648,12 +1995,17 @@ async def test_get_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_ wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_instance in client._client._transport._wrapped_methods + assert ( + client._client._transport.get_instance + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_instance] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.get_instance + ] = mock_rpc request = {} await client.get_instance(request) @@ -1667,8 +2019,11 @@ async def test_get_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_ assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.GetInstanceRequest): +async def test_get_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.GetInstanceRequest +): client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -1679,39 +2034,41 @@ async def test_get_instance_async(transport: str = 
'grpc_asyncio', request_type= request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.Instance( - name='name_value', - display_name='display_name_value', - location_id='location_id_value', - alternative_location_id='alternative_location_id_value', - redis_version='redis_version_value', - reserved_ip_range='reserved_ip_range_value', - secondary_ip_range='secondary_ip_range_value', - host='host_value', - port=453, - current_location_id='current_location_id_value', - state=cloud_redis.Instance.State.CREATING, - status_message='status_message_value', - tier=cloud_redis.Instance.Tier.BASIC, - memory_size_gb=1499, - authorized_network='authorized_network_value', - persistence_iam_identity='persistence_iam_identity_value', - connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, - auth_enabled=True, - transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, - replica_count=1384, - read_endpoint='read_endpoint_value', - read_endpoint_port=1920, - read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, - customer_managed_key='customer_managed_key_value', - suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], - maintenance_version='maintenance_version_value', - available_maintenance_versions=['available_maintenance_versions_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_redis.Instance( + name="name_value", + display_name="display_name_value", + location_id="location_id_value", + alternative_location_id="alternative_location_id_value", + redis_version="redis_version_value", + 
reserved_ip_range="reserved_ip_range_value", + secondary_ip_range="secondary_ip_range_value", + host="host_value", + port=453, + current_location_id="current_location_id_value", + state=cloud_redis.Instance.State.CREATING, + status_message="status_message_value", + tier=cloud_redis.Instance.Tier.BASIC, + memory_size_gb=1499, + authorized_network="authorized_network_value", + persistence_iam_identity="persistence_iam_identity_value", + connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, + auth_enabled=True, + transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, + replica_count=1384, + read_endpoint="read_endpoint_value", + read_endpoint_port=1920, + read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, + customer_managed_key="customer_managed_key_value", + suspension_reasons=[ + cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE + ], + maintenance_version="maintenance_version_value", + available_maintenance_versions=["available_maintenance_versions_value"], + ) + ) response = await client.get_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1722,39 +2079,50 @@ async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type= # Establish that the response is the type that we expect. 
assert isinstance(response, cloud_redis.Instance) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.location_id == 'location_id_value' - assert response.alternative_location_id == 'alternative_location_id_value' - assert response.redis_version == 'redis_version_value' - assert response.reserved_ip_range == 'reserved_ip_range_value' - assert response.secondary_ip_range == 'secondary_ip_range_value' - assert response.host == 'host_value' + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.location_id == "location_id_value" + assert response.alternative_location_id == "alternative_location_id_value" + assert response.redis_version == "redis_version_value" + assert response.reserved_ip_range == "reserved_ip_range_value" + assert response.secondary_ip_range == "secondary_ip_range_value" + assert response.host == "host_value" assert response.port == 453 - assert response.current_location_id == 'current_location_id_value' + assert response.current_location_id == "current_location_id_value" assert response.state == cloud_redis.Instance.State.CREATING - assert response.status_message == 'status_message_value' + assert response.status_message == "status_message_value" assert response.tier == cloud_redis.Instance.Tier.BASIC assert response.memory_size_gb == 1499 - assert response.authorized_network == 'authorized_network_value' - assert response.persistence_iam_identity == 'persistence_iam_identity_value' + assert response.authorized_network == "authorized_network_value" + assert response.persistence_iam_identity == "persistence_iam_identity_value" assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING assert response.auth_enabled is True - assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + assert ( + response.transit_encryption_mode + == 
cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + ) assert response.replica_count == 1384 - assert response.read_endpoint == 'read_endpoint_value' + assert response.read_endpoint == "read_endpoint_value" assert response.read_endpoint_port == 1920 - assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED - assert response.customer_managed_key == 'customer_managed_key_value' - assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE] - assert response.maintenance_version == 'maintenance_version_value' - assert response.available_maintenance_versions == ['available_maintenance_versions_value'] + assert ( + response.read_replicas_mode + == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED + ) + assert response.customer_managed_key == "customer_managed_key_value" + assert response.suspension_reasons == [ + cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE + ] + assert response.maintenance_version == "maintenance_version_value" + assert response.available_maintenance_versions == [ + "available_maintenance_versions_value" + ] @pytest.mark.asyncio async def test_get_instance_async_from_dict(): await test_get_instance_async(request_type=dict) + def test_get_instance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1764,12 +2132,10 @@ def test_get_instance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_redis.GetInstanceRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: call.return_value = cloud_redis.Instance() client.get_instance(request) @@ -1781,9 +2147,9 @@ def test_get_instance_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1796,13 +2162,13 @@ async def test_get_instance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_redis.GetInstanceRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.Instance()) + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_redis.Instance() + ) await client.get_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1813,9 +2179,9 @@ async def test_get_instance_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_get_instance_flattened(): @@ -1824,15 +2190,13 @@ def test_get_instance_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = cloud_redis.Instance() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_instance( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -1840,7 +2204,7 @@ def test_get_instance_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -1854,9 +2218,10 @@ def test_get_instance_flattened_error(): with pytest.raises(ValueError): client.get_instance( cloud_redis.GetInstanceRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_get_instance_flattened_async(): client = CloudRedisAsyncClient( @@ -1864,17 +2229,17 @@ async def test_get_instance_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_redis.Instance() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.Instance()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_redis.Instance() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.get_instance( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -1882,9 +2247,10 @@ async def test_get_instance_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_get_instance_flattened_error_async(): client = CloudRedisAsyncClient( @@ -1896,15 +2262,18 @@ async def test_get_instance_flattened_error_async(): with pytest.raises(ValueError): await client.get_instance( cloud_redis.GetInstanceRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - cloud_redis.GetInstanceAuthStringRequest, - dict, -]) -def test_get_instance_auth_string(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.GetInstanceAuthStringRequest, + dict, + ], +) +def test_get_instance_auth_string(request_type, transport: str = "grpc"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1916,11 +2285,11 @@ def test_get_instance_auth_string(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_instance_auth_string), - '__call__') as call: + type(client.transport.get_instance_auth_string), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = cloud_redis.InstanceAuthString( - auth_string='auth_string_value', + auth_string="auth_string_value", ) response = client.get_instance_auth_string(request) @@ -1932,7 +2301,7 @@ def test_get_instance_auth_string(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, cloud_redis.InstanceAuthString) - assert response.auth_string == 'auth_string_value' + assert response.auth_string == "auth_string_value" def test_get_instance_auth_string_non_empty_request_with_auto_populated_field(): @@ -1940,28 +2309,31 @@ def test_get_instance_auth_string_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = cloud_redis.GetInstanceAuthStringRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_instance_auth_string), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.get_instance_auth_string), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.get_instance_auth_string(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.GetInstanceAuthStringRequest( - name='name_value', + name="name_value", ) + def test_get_instance_auth_string_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1976,12 +2348,19 @@ def test_get_instance_auth_string_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_instance_auth_string in client._transport._wrapped_methods + assert ( + client._transport.get_instance_auth_string + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_instance_auth_string] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_instance_auth_string + ] = mock_rpc request = {} client.get_instance_auth_string(request) @@ -1994,8 +2373,11 @@ def test_get_instance_auth_string_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_instance_auth_string_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_get_instance_auth_string_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2009,12 +2391,17 @@ async def test_get_instance_auth_string_async_use_cached_wrapped_rpc(transport: wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_instance_auth_string in client._client._transport._wrapped_methods + assert ( + client._client._transport.get_instance_auth_string + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_instance_auth_string] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.get_instance_auth_string + ] = mock_rpc request = {} await client.get_instance_auth_string(request) @@ -2028,8 +2415,12 @@ async def test_get_instance_auth_string_async_use_cached_wrapped_rpc(transport: assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_instance_auth_string_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.GetInstanceAuthStringRequest): +async def test_get_instance_auth_string_async( + transport: str = "grpc_asyncio", + request_type=cloud_redis.GetInstanceAuthStringRequest, +): client = 
CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2041,12 +2432,14 @@ async def test_get_instance_auth_string_async(transport: str = 'grpc_asyncio', r # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_instance_auth_string), - '__call__') as call: + type(client.transport.get_instance_auth_string), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.InstanceAuthString( - auth_string='auth_string_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_redis.InstanceAuthString( + auth_string="auth_string_value", + ) + ) response = await client.get_instance_auth_string(request) # Establish that the underlying gRPC stub method was called. @@ -2057,13 +2450,14 @@ async def test_get_instance_auth_string_async(transport: str = 'grpc_asyncio', r # Establish that the response is the type that we expect. assert isinstance(response, cloud_redis.InstanceAuthString) - assert response.auth_string == 'auth_string_value' + assert response.auth_string == "auth_string_value" @pytest.mark.asyncio async def test_get_instance_auth_string_async_from_dict(): await test_get_instance_auth_string_async(request_type=dict) + def test_get_instance_auth_string_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2073,12 +2467,12 @@ def test_get_instance_auth_string_field_headers(): # a field header. Set these to a non-empty value. request = cloud_redis.GetInstanceAuthStringRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_instance_auth_string), - '__call__') as call: + type(client.transport.get_instance_auth_string), "__call__" + ) as call: call.return_value = cloud_redis.InstanceAuthString() client.get_instance_auth_string(request) @@ -2090,9 +2484,9 @@ def test_get_instance_auth_string_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2105,13 +2499,15 @@ async def test_get_instance_auth_string_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_redis.GetInstanceAuthStringRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_instance_auth_string), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.InstanceAuthString()) + type(client.transport.get_instance_auth_string), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_redis.InstanceAuthString() + ) await client.get_instance_auth_string(request) # Establish that the underlying gRPC stub method was called. @@ -2122,9 +2518,9 @@ async def test_get_instance_auth_string_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_get_instance_auth_string_flattened(): @@ -2134,14 +2530,14 @@ def test_get_instance_auth_string_flattened(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_instance_auth_string), - '__call__') as call: + type(client.transport.get_instance_auth_string), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = cloud_redis.InstanceAuthString() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_instance_auth_string( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -2149,7 +2545,7 @@ def test_get_instance_auth_string_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -2163,9 +2559,10 @@ def test_get_instance_auth_string_flattened_error(): with pytest.raises(ValueError): client.get_instance_auth_string( cloud_redis.GetInstanceAuthStringRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_get_instance_auth_string_flattened_async(): client = CloudRedisAsyncClient( @@ -2174,16 +2571,18 @@ async def test_get_instance_auth_string_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_instance_auth_string), - '__call__') as call: + type(client.transport.get_instance_auth_string), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = cloud_redis.InstanceAuthString() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.InstanceAuthString()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_redis.InstanceAuthString() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.get_instance_auth_string( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -2191,9 +2590,10 @@ async def test_get_instance_auth_string_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_get_instance_auth_string_flattened_error_async(): client = CloudRedisAsyncClient( @@ -2205,15 +2605,18 @@ async def test_get_instance_auth_string_flattened_error_async(): with pytest.raises(ValueError): await client.get_instance_auth_string( cloud_redis.GetInstanceAuthStringRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - cloud_redis.CreateInstanceRequest, - dict, -]) -def test_create_instance(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.CreateInstanceRequest, + dict, + ], +) +def test_create_instance(request_type, transport: str = "grpc"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2224,11 +2627,9 @@ def test_create_instance(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.create_instance(request) # Establish that the underlying gRPC stub method was called. 
@@ -2246,30 +2647,31 @@ def test_create_instance_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = cloud_redis.CreateInstanceRequest( - parent='parent_value', - instance_id='instance_id_value', + parent="parent_value", + instance_id="instance_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_instance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.CreateInstanceRequest( - parent='parent_value', - instance_id='instance_id_value', + parent="parent_value", + instance_id="instance_id_value", ) + def test_create_instance_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2288,7 +2690,9 @@ def test_create_instance_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc request = {} client.create_instance(request) @@ -2307,8 +2711,11 @@ def test_create_instance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_create_instance_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2322,12 +2729,17 @@ async def test_create_instance_async_use_cached_wrapped_rpc(transport: str = "gr wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.create_instance in client._client._transport._wrapped_methods + assert ( + client._client._transport.create_instance + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_instance] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.create_instance + ] = mock_rpc request = {} await client.create_instance(request) @@ -2346,8 +2758,11 @@ async def test_create_instance_async_use_cached_wrapped_rpc(transport: str = "gr assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.CreateInstanceRequest): +async def test_create_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.CreateInstanceRequest +): client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2358,12 +2773,10 @@ async def 
test_create_instance_async(transport: str = 'grpc_asyncio', request_ty request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.create_instance(request) @@ -2381,6 +2794,7 @@ async def test_create_instance_async(transport: str = 'grpc_asyncio', request_ty async def test_create_instance_async_from_dict(): await test_create_instance_async(request_type=dict) + def test_create_instance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2390,13 +2804,11 @@ def test_create_instance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_redis.CreateInstanceRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.create_instance(request) # Establish that the underlying gRPC stub method was called. @@ -2407,9 +2819,9 @@ def test_create_instance_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2422,13 +2834,13 @@ async def test_create_instance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_redis.CreateInstanceRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.create_instance(request) # Establish that the underlying gRPC stub method was called. @@ -2439,9 +2851,9 @@ async def test_create_instance_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_create_instance_flattened(): @@ -2450,17 +2862,15 @@ def test_create_instance_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.create_instance( - parent='parent_value', - instance_id='instance_id_value', - instance=cloud_redis.Instance(name='name_value'), + parent="parent_value", + instance_id="instance_id_value", + instance=cloud_redis.Instance(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -2468,13 +2878,13 @@ def test_create_instance_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].instance_id - mock_val = 'instance_id_value' + mock_val = "instance_id_value" assert arg == mock_val arg = args[0].instance - mock_val = cloud_redis.Instance(name='name_value') + mock_val = cloud_redis.Instance(name="name_value") assert arg == mock_val @@ -2488,11 +2898,12 @@ def test_create_instance_flattened_error(): with pytest.raises(ValueError): client.create_instance( cloud_redis.CreateInstanceRequest(), - parent='parent_value', - instance_id='instance_id_value', - instance=cloud_redis.Instance(name='name_value'), + parent="parent_value", + instance_id="instance_id_value", + instance=cloud_redis.Instance(name="name_value"), ) + @pytest.mark.asyncio async def test_create_instance_flattened_async(): client = CloudRedisAsyncClient( @@ -2500,21 +2911,19 @@ async def test_create_instance_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_instance( - parent='parent_value', - instance_id='instance_id_value', - instance=cloud_redis.Instance(name='name_value'), + parent="parent_value", + instance_id="instance_id_value", + instance=cloud_redis.Instance(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -2522,15 +2931,16 @@ async def test_create_instance_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].instance_id - mock_val = 'instance_id_value' + mock_val = "instance_id_value" assert arg == mock_val arg = args[0].instance - mock_val = cloud_redis.Instance(name='name_value') + mock_val = cloud_redis.Instance(name="name_value") assert arg == mock_val + @pytest.mark.asyncio async def test_create_instance_flattened_error_async(): client = CloudRedisAsyncClient( @@ -2542,17 +2952,20 @@ async def test_create_instance_flattened_error_async(): with pytest.raises(ValueError): await client.create_instance( cloud_redis.CreateInstanceRequest(), - parent='parent_value', - instance_id='instance_id_value', - instance=cloud_redis.Instance(name='name_value'), + parent="parent_value", + instance_id="instance_id_value", + instance=cloud_redis.Instance(name="name_value"), ) -@pytest.mark.parametrize("request_type", [ - cloud_redis.UpdateInstanceRequest, - dict, -]) -def test_update_instance(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + 
cloud_redis.UpdateInstanceRequest, + dict, + ], +) +def test_update_instance(request_type, transport: str = "grpc"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2563,11 +2976,9 @@ def test_update_instance(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.update_instance(request) # Establish that the underlying gRPC stub method was called. @@ -2585,25 +2996,24 @@ def test_update_instance_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_redis.UpdateInstanceRequest( - ) + request = cloud_redis.UpdateInstanceRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.update_instance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.UpdateInstanceRequest( - ) + assert args[0] == cloud_redis.UpdateInstanceRequest() + def test_update_instance_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2623,7 +3033,9 @@ def test_update_instance_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc request = {} client.update_instance(request) @@ -2642,8 +3054,11 @@ def test_update_instance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_update_instance_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2657,12 +3072,17 @@ async def test_update_instance_async_use_cached_wrapped_rpc(transport: str = "gr wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.update_instance in client._client._transport._wrapped_methods + assert ( + client._client._transport.update_instance + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_instance] = mock_rpc + 
client._client._transport._wrapped_methods[ + client._client._transport.update_instance + ] = mock_rpc request = {} await client.update_instance(request) @@ -2681,8 +3101,11 @@ async def test_update_instance_async_use_cached_wrapped_rpc(transport: str = "gr assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.UpdateInstanceRequest): +async def test_update_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.UpdateInstanceRequest +): client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2693,12 +3116,10 @@ async def test_update_instance_async(transport: str = 'grpc_asyncio', request_ty request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.update_instance(request) @@ -2716,6 +3137,7 @@ async def test_update_instance_async(transport: str = 'grpc_asyncio', request_ty async def test_update_instance_async_from_dict(): await test_update_instance_async(request_type=dict) + def test_update_instance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2725,13 +3147,11 @@ def test_update_instance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_redis.UpdateInstanceRequest() - request.instance.name = 'name_value' + request.instance.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.update_instance(request) # Establish that the underlying gRPC stub method was called. @@ -2742,9 +3162,9 @@ def test_update_instance_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'instance.name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "instance.name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2757,13 +3177,13 @@ async def test_update_instance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_redis.UpdateInstanceRequest() - request.instance.name = 'name_value' + request.instance.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.update_instance(request) # Establish that the underlying gRPC stub method was called. @@ -2774,9 +3194,9 @@ async def test_update_instance_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'instance.name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "instance.name=name_value", + ) in kw["metadata"] def test_update_instance_flattened(): @@ -2785,16 +3205,14 @@ def test_update_instance_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_instance( - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - instance=cloud_redis.Instance(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + instance=cloud_redis.Instance(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -2802,10 +3220,10 @@ def test_update_instance_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val arg = args[0].instance - mock_val = cloud_redis.Instance(name='name_value') + mock_val = cloud_redis.Instance(name="name_value") assert arg == mock_val @@ -2819,10 +3237,11 @@ def test_update_instance_flattened_error(): with pytest.raises(ValueError): client.update_instance( cloud_redis.UpdateInstanceRequest(), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - instance=cloud_redis.Instance(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + 
instance=cloud_redis.Instance(name="name_value"), ) + @pytest.mark.asyncio async def test_update_instance_flattened_async(): client = CloudRedisAsyncClient( @@ -2830,20 +3249,18 @@ async def test_update_instance_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_instance( - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - instance=cloud_redis.Instance(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + instance=cloud_redis.Instance(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -2851,12 +3268,13 @@ async def test_update_instance_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val arg = args[0].instance - mock_val = cloud_redis.Instance(name='name_value') + mock_val = cloud_redis.Instance(name="name_value") assert arg == mock_val + @pytest.mark.asyncio async def test_update_instance_flattened_error_async(): client = CloudRedisAsyncClient( @@ -2868,16 +3286,19 @@ async def test_update_instance_flattened_error_async(): with pytest.raises(ValueError): await client.update_instance( 
cloud_redis.UpdateInstanceRequest(), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - instance=cloud_redis.Instance(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + instance=cloud_redis.Instance(name="name_value"), ) -@pytest.mark.parametrize("request_type", [ - cloud_redis.UpgradeInstanceRequest, - dict, -]) -def test_upgrade_instance(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.UpgradeInstanceRequest, + dict, + ], +) +def test_upgrade_instance(request_type, transport: str = "grpc"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2888,11 +3309,9 @@ def test_upgrade_instance(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.upgrade_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.upgrade_instance(request) # Establish that the underlying gRPC stub method was called. @@ -2910,30 +3329,31 @@ def test_upgrade_instance_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
request = cloud_redis.UpgradeInstanceRequest( - name='name_value', - redis_version='redis_version_value', + name="name_value", + redis_version="redis_version_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.upgrade_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.upgrade_instance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.UpgradeInstanceRequest( - name='name_value', - redis_version='redis_version_value', + name="name_value", + redis_version="redis_version_value", ) + def test_upgrade_instance_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2952,8 +3372,12 @@ def test_upgrade_instance_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.upgrade_instance] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.upgrade_instance] = ( + mock_rpc + ) request = {} client.upgrade_instance(request) @@ -2971,8 +3395,11 @@ def test_upgrade_instance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_upgrade_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_upgrade_instance_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2986,12 +3413,17 @@ async def test_upgrade_instance_async_use_cached_wrapped_rpc(transport: str = "g wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.upgrade_instance in client._client._transport._wrapped_methods + assert ( + client._client._transport.upgrade_instance + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.upgrade_instance] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.upgrade_instance + ] = mock_rpc request = {} await client.upgrade_instance(request) @@ -3010,8 +3442,11 @@ async def test_upgrade_instance_async_use_cached_wrapped_rpc(transport: str = "g assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_upgrade_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.UpgradeInstanceRequest): +async def test_upgrade_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.UpgradeInstanceRequest +): client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -3022,12 +3457,10 
@@ async def test_upgrade_instance_async(transport: str = 'grpc_asyncio', request_t request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.upgrade_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.upgrade_instance(request) @@ -3045,6 +3478,7 @@ async def test_upgrade_instance_async(transport: str = 'grpc_asyncio', request_t async def test_upgrade_instance_async_from_dict(): await test_upgrade_instance_async(request_type=dict) + def test_upgrade_instance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3054,13 +3488,11 @@ def test_upgrade_instance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_redis.UpgradeInstanceRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.upgrade_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.upgrade_instance(request) # Establish that the underlying gRPC stub method was called. @@ -3071,9 +3503,9 @@ def test_upgrade_instance_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -3086,13 +3518,13 @@ async def test_upgrade_instance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_redis.UpgradeInstanceRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.upgrade_instance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.upgrade_instance(request) # Establish that the underlying gRPC stub method was called. @@ -3103,9 +3535,9 @@ async def test_upgrade_instance_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_upgrade_instance_flattened(): @@ -3114,16 +3546,14 @@ def test_upgrade_instance_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.upgrade_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.upgrade_instance( - name='name_value', - redis_version='redis_version_value', + name="name_value", + redis_version="redis_version_value", ) # Establish that the underlying call was made with the expected @@ -3131,10 +3561,10 @@ def test_upgrade_instance_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].redis_version - mock_val = 'redis_version_value' + mock_val = "redis_version_value" assert arg == mock_val @@ -3148,10 +3578,11 @@ def test_upgrade_instance_flattened_error(): with pytest.raises(ValueError): client.upgrade_instance( cloud_redis.UpgradeInstanceRequest(), - name='name_value', - redis_version='redis_version_value', + name="name_value", + redis_version="redis_version_value", ) + @pytest.mark.asyncio async def test_upgrade_instance_flattened_async(): client = CloudRedisAsyncClient( @@ -3159,20 +3590,18 @@ async def test_upgrade_instance_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.upgrade_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.upgrade_instance( - name='name_value', - redis_version='redis_version_value', + name="name_value", + redis_version="redis_version_value", ) # Establish that the underlying call was made with the expected @@ -3180,12 +3609,13 @@ async def test_upgrade_instance_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].redis_version - mock_val = 'redis_version_value' + mock_val = "redis_version_value" assert arg == mock_val + @pytest.mark.asyncio async def test_upgrade_instance_flattened_error_async(): client = CloudRedisAsyncClient( @@ -3197,16 +3627,19 @@ async def test_upgrade_instance_flattened_error_async(): with pytest.raises(ValueError): await client.upgrade_instance( cloud_redis.UpgradeInstanceRequest(), - name='name_value', - redis_version='redis_version_value', + name="name_value", + redis_version="redis_version_value", ) -@pytest.mark.parametrize("request_type", [ - cloud_redis.ImportInstanceRequest, - dict, -]) -def test_import_instance(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.ImportInstanceRequest, + dict, + ], +) +def test_import_instance(request_type, transport: str = "grpc"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3217,11 +3650,9 @@ def test_import_instance(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.import_instance), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.import_instance(request) # Establish that the underlying gRPC stub method was called. @@ -3239,28 +3670,29 @@ def test_import_instance_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = cloud_redis.ImportInstanceRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.import_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.import_instance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.ImportInstanceRequest( - name='name_value', + name="name_value", ) + def test_import_instance_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3279,7 +3711,9 @@ def test_import_instance_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.import_instance] = mock_rpc request = {} client.import_instance(request) @@ -3298,8 +3732,11 @@ def test_import_instance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_import_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_import_instance_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -3313,12 +3750,17 @@ async def test_import_instance_async_use_cached_wrapped_rpc(transport: str = "gr wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.import_instance in client._client._transport._wrapped_methods + assert ( + client._client._transport.import_instance + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.import_instance] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.import_instance + ] = mock_rpc request = {} await client.import_instance(request) @@ -3337,8 +3779,11 @@ async def test_import_instance_async_use_cached_wrapped_rpc(transport: str = "gr assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_import_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.ImportInstanceRequest): +async def test_import_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.ImportInstanceRequest +): client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -3349,12 +3794,10 @@ async def 
test_import_instance_async(transport: str = 'grpc_asyncio', request_ty request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.import_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.import_instance(request) @@ -3372,6 +3815,7 @@ async def test_import_instance_async(transport: str = 'grpc_asyncio', request_ty async def test_import_instance_async_from_dict(): await test_import_instance_async(request_type=dict) + def test_import_instance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3381,13 +3825,11 @@ def test_import_instance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_redis.ImportInstanceRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.import_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.import_instance(request) # Establish that the underlying gRPC stub method was called. @@ -3398,9 +3840,9 @@ def test_import_instance_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -3413,13 +3855,13 @@ async def test_import_instance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_redis.ImportInstanceRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_instance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + with mock.patch.object(type(client.transport.import_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.import_instance(request) # Establish that the underlying gRPC stub method was called. @@ -3430,9 +3872,9 @@ async def test_import_instance_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_import_instance_flattened(): @@ -3441,16 +3883,16 @@ def test_import_instance_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.import_instance), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.import_instance( - name='name_value', - input_config=cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri='uri_value')), + name="name_value", + input_config=cloud_redis.InputConfig( + gcs_source=cloud_redis.GcsSource(uri="uri_value") + ), ) # Establish that the underlying call was made with the expected @@ -3458,10 +3900,12 @@ def test_import_instance_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].input_config - mock_val = cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri='uri_value')) + mock_val = cloud_redis.InputConfig( + gcs_source=cloud_redis.GcsSource(uri="uri_value") + ) assert arg == mock_val @@ -3475,10 +3919,13 @@ def test_import_instance_flattened_error(): with pytest.raises(ValueError): client.import_instance( cloud_redis.ImportInstanceRequest(), - name='name_value', - input_config=cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri='uri_value')), + name="name_value", + input_config=cloud_redis.InputConfig( + gcs_source=cloud_redis.GcsSource(uri="uri_value") + ), ) + @pytest.mark.asyncio async def test_import_instance_flattened_async(): client = CloudRedisAsyncClient( @@ -3486,20 +3933,20 @@ async def test_import_instance_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.import_instance), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.import_instance( - name='name_value', - input_config=cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri='uri_value')), + name="name_value", + input_config=cloud_redis.InputConfig( + gcs_source=cloud_redis.GcsSource(uri="uri_value") + ), ) # Establish that the underlying call was made with the expected @@ -3507,12 +3954,15 @@ async def test_import_instance_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].input_config - mock_val = cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri='uri_value')) + mock_val = cloud_redis.InputConfig( + gcs_source=cloud_redis.GcsSource(uri="uri_value") + ) assert arg == mock_val + @pytest.mark.asyncio async def test_import_instance_flattened_error_async(): client = CloudRedisAsyncClient( @@ -3524,16 +3974,21 @@ async def test_import_instance_flattened_error_async(): with pytest.raises(ValueError): await client.import_instance( cloud_redis.ImportInstanceRequest(), - name='name_value', - input_config=cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri='uri_value')), + name="name_value", + input_config=cloud_redis.InputConfig( + gcs_source=cloud_redis.GcsSource(uri="uri_value") + ), ) -@pytest.mark.parametrize("request_type", [ - cloud_redis.ExportInstanceRequest, - dict, -]) -def test_export_instance(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.ExportInstanceRequest, + dict, + ], +) +def 
test_export_instance(request_type, transport: str = "grpc"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3544,11 +3999,9 @@ def test_export_instance(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.export_instance), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.export_instance(request) # Establish that the underlying gRPC stub method was called. @@ -3566,28 +4019,29 @@ def test_export_instance_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = cloud_redis.ExportInstanceRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.export_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.export_instance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.ExportInstanceRequest( - name='name_value', + name="name_value", ) + def test_export_instance_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3606,7 +4060,9 @@ def test_export_instance_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.export_instance] = mock_rpc request = {} client.export_instance(request) @@ -3625,8 +4081,11 @@ def test_export_instance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_export_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_export_instance_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -3640,12 +4099,17 @@ async def test_export_instance_async_use_cached_wrapped_rpc(transport: str = "gr wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.export_instance in client._client._transport._wrapped_methods + assert ( + client._client._transport.export_instance + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.export_instance] = mock_rpc + 
client._client._transport._wrapped_methods[ + client._client._transport.export_instance + ] = mock_rpc request = {} await client.export_instance(request) @@ -3664,8 +4128,11 @@ async def test_export_instance_async_use_cached_wrapped_rpc(transport: str = "gr assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_export_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.ExportInstanceRequest): +async def test_export_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.ExportInstanceRequest +): client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -3676,12 +4143,10 @@ async def test_export_instance_async(transport: str = 'grpc_asyncio', request_ty request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.export_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.export_instance(request) @@ -3699,6 +4164,7 @@ async def test_export_instance_async(transport: str = 'grpc_asyncio', request_ty async def test_export_instance_async_from_dict(): await test_export_instance_async(request_type=dict) + def test_export_instance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3708,13 +4174,11 @@ def test_export_instance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_redis.ExportInstanceRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.export_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.export_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.export_instance(request) # Establish that the underlying gRPC stub method was called. @@ -3725,9 +4189,9 @@ def test_export_instance_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -3740,13 +4204,13 @@ async def test_export_instance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_redis.ExportInstanceRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_instance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + with mock.patch.object(type(client.transport.export_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.export_instance(request) # Establish that the underlying gRPC stub method was called. @@ -3757,9 +4221,9 @@ async def test_export_instance_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_export_instance_flattened(): @@ -3768,16 +4232,16 @@ def test_export_instance_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.export_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.export_instance), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.export_instance( - name='name_value', - output_config=cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri='uri_value')), + name="name_value", + output_config=cloud_redis.OutputConfig( + gcs_destination=cloud_redis.GcsDestination(uri="uri_value") + ), ) # Establish that the underlying call was made with the expected @@ -3785,10 +4249,12 @@ def test_export_instance_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].output_config - mock_val = cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri='uri_value')) + mock_val = cloud_redis.OutputConfig( + gcs_destination=cloud_redis.GcsDestination(uri="uri_value") + ) assert arg == mock_val @@ -3802,10 +4268,13 @@ def test_export_instance_flattened_error(): with pytest.raises(ValueError): client.export_instance( cloud_redis.ExportInstanceRequest(), - name='name_value', - output_config=cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri='uri_value')), + name="name_value", + output_config=cloud_redis.OutputConfig( + gcs_destination=cloud_redis.GcsDestination(uri="uri_value") + ), ) + @pytest.mark.asyncio async def test_export_instance_flattened_async(): client = CloudRedisAsyncClient( @@ -3813,20 +4282,20 @@ async def test_export_instance_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.export_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.export_instance), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.export_instance( - name='name_value', - output_config=cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri='uri_value')), + name="name_value", + output_config=cloud_redis.OutputConfig( + gcs_destination=cloud_redis.GcsDestination(uri="uri_value") + ), ) # Establish that the underlying call was made with the expected @@ -3834,12 +4303,15 @@ async def test_export_instance_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].output_config - mock_val = cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri='uri_value')) + mock_val = cloud_redis.OutputConfig( + gcs_destination=cloud_redis.GcsDestination(uri="uri_value") + ) assert arg == mock_val + @pytest.mark.asyncio async def test_export_instance_flattened_error_async(): client = CloudRedisAsyncClient( @@ -3851,16 +4323,21 @@ async def test_export_instance_flattened_error_async(): with pytest.raises(ValueError): await client.export_instance( cloud_redis.ExportInstanceRequest(), - name='name_value', - output_config=cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri='uri_value')), + name="name_value", + output_config=cloud_redis.OutputConfig( + 
gcs_destination=cloud_redis.GcsDestination(uri="uri_value") + ), ) -@pytest.mark.parametrize("request_type", [ - cloud_redis.FailoverInstanceRequest, - dict, -]) -def test_failover_instance(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.FailoverInstanceRequest, + dict, + ], +) +def test_failover_instance(request_type, transport: str = "grpc"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3872,10 +4349,10 @@ def test_failover_instance(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.failover_instance), - '__call__') as call: + type(client.transport.failover_instance), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.failover_instance(request) # Establish that the underlying gRPC stub method was called. @@ -3893,28 +4370,31 @@ def test_failover_instance_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = cloud_redis.FailoverInstanceRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.failover_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ type(client.transport.failover_instance), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.failover_instance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.FailoverInstanceRequest( - name='name_value', + name="name_value", ) + def test_failover_instance_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3933,8 +4413,12 @@ def test_failover_instance_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.failover_instance] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.failover_instance] = ( + mock_rpc + ) request = {} client.failover_instance(request) @@ -3952,8 +4436,11 @@ def test_failover_instance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_failover_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_failover_instance_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -3967,12 +4454,17 @@ async def test_failover_instance_async_use_cached_wrapped_rpc(transport: str = " wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.failover_instance in client._client._transport._wrapped_methods + assert ( + 
client._client._transport.failover_instance + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.failover_instance] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.failover_instance + ] = mock_rpc request = {} await client.failover_instance(request) @@ -3991,8 +4483,11 @@ async def test_failover_instance_async_use_cached_wrapped_rpc(transport: str = " assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_failover_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.FailoverInstanceRequest): +async def test_failover_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.FailoverInstanceRequest +): client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -4004,11 +4499,11 @@ async def test_failover_instance_async(transport: str = 'grpc_asyncio', request_ # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.failover_instance), - '__call__') as call: + type(client.transport.failover_instance), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.failover_instance(request) @@ -4026,6 +4521,7 @@ async def test_failover_instance_async(transport: str = 'grpc_asyncio', request_ async def test_failover_instance_async_from_dict(): await test_failover_instance_async(request_type=dict) + def test_failover_instance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4035,13 +4531,13 @@ def test_failover_instance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_redis.FailoverInstanceRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.failover_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + type(client.transport.failover_instance), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.failover_instance(request) # Establish that the underlying gRPC stub method was called. @@ -4052,9 +4548,9 @@ def test_failover_instance_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -4067,13 +4563,15 @@ async def test_failover_instance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_redis.FailoverInstanceRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.failover_instance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + type(client.transport.failover_instance), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.failover_instance(request) # Establish that the underlying gRPC stub method was called. @@ -4084,9 +4582,9 @@ async def test_failover_instance_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_failover_instance_flattened(): @@ -4096,14 +4594,14 @@ def test_failover_instance_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.failover_instance), - '__call__') as call: + type(client.transport.failover_instance), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.failover_instance( - name='name_value', + name="name_value", data_protection_mode=cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS, ) @@ -4112,10 +4610,12 @@ def test_failover_instance_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].data_protection_mode - mock_val = cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS + mock_val = ( + cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS + ) assert arg == mock_val @@ -4129,10 +4629,11 @@ def test_failover_instance_flattened_error(): with pytest.raises(ValueError): client.failover_instance( cloud_redis.FailoverInstanceRequest(), - name='name_value', + name="name_value", data_protection_mode=cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS, ) + @pytest.mark.asyncio async def test_failover_instance_flattened_async(): client = CloudRedisAsyncClient( @@ -4141,18 +4642,18 @@ async def test_failover_instance_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.failover_instance), - '__call__') as call: + type(client.transport.failover_instance), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.failover_instance( - name='name_value', + name="name_value", data_protection_mode=cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS, ) @@ -4161,12 +4662,15 @@ async def test_failover_instance_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].data_protection_mode - mock_val = cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS + mock_val = ( + cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS + ) assert arg == mock_val + @pytest.mark.asyncio async def test_failover_instance_flattened_error_async(): client = CloudRedisAsyncClient( @@ -4178,16 +4682,19 @@ async def test_failover_instance_flattened_error_async(): with pytest.raises(ValueError): await client.failover_instance( cloud_redis.FailoverInstanceRequest(), - name='name_value', + name="name_value", data_protection_mode=cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS, ) -@pytest.mark.parametrize("request_type", [ - cloud_redis.DeleteInstanceRequest, - dict, -]) -def test_delete_instance(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.DeleteInstanceRequest, + dict, + ], +) +def test_delete_instance(request_type, transport: str = "grpc"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4198,11 +4705,9 @@ def test_delete_instance(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.delete_instance(request) # Establish that the underlying gRPC stub method was called. @@ -4220,28 +4725,29 @@ def test_delete_instance_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = cloud_redis.DeleteInstanceRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_instance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.DeleteInstanceRequest( - name='name_value', + name="name_value", ) + def test_delete_instance_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -4260,7 +4766,9 @@ def test_delete_instance_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.delete_instance] = mock_rpc request = {} client.delete_instance(request) @@ -4279,8 +4787,11 @@ def test_delete_instance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_delete_instance_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -4294,12 +4805,17 @@ async def test_delete_instance_async_use_cached_wrapped_rpc(transport: str = "gr wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.delete_instance in client._client._transport._wrapped_methods + assert ( + client._client._transport.delete_instance + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_instance] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.delete_instance + ] = mock_rpc request = {} await client.delete_instance(request) @@ -4318,8 +4834,11 @@ async def test_delete_instance_async_use_cached_wrapped_rpc(transport: str = "gr assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.DeleteInstanceRequest): +async def test_delete_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.DeleteInstanceRequest +): client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -4330,12 +4849,10 @@ async def 
test_delete_instance_async(transport: str = 'grpc_asyncio', request_ty request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.delete_instance(request) @@ -4353,6 +4870,7 @@ async def test_delete_instance_async(transport: str = 'grpc_asyncio', request_ty async def test_delete_instance_async_from_dict(): await test_delete_instance_async(request_type=dict) + def test_delete_instance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4362,13 +4880,11 @@ def test_delete_instance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_redis.DeleteInstanceRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.delete_instance(request) # Establish that the underlying gRPC stub method was called. @@ -4379,9 +4895,9 @@ def test_delete_instance_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -4394,13 +4910,13 @@ async def test_delete_instance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_redis.DeleteInstanceRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. @@ -4411,9 +4927,9 @@ async def test_delete_instance_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_delete_instance_flattened(): @@ -4422,15 +4938,13 @@ def test_delete_instance_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.delete_instance( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -4438,7 +4952,7 @@ def test_delete_instance_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -4452,9 +4966,10 @@ def test_delete_instance_flattened_error(): with pytest.raises(ValueError): client.delete_instance( cloud_redis.DeleteInstanceRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_delete_instance_flattened_async(): client = CloudRedisAsyncClient( @@ -4462,19 +4977,17 @@ async def test_delete_instance_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.delete_instance( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -4482,9 +4995,10 @@ async def test_delete_instance_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_delete_instance_flattened_error_async(): client = CloudRedisAsyncClient( @@ -4496,15 +5010,18 @@ async def test_delete_instance_flattened_error_async(): with pytest.raises(ValueError): await client.delete_instance( cloud_redis.DeleteInstanceRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - cloud_redis.RescheduleMaintenanceRequest, - dict, -]) -def test_reschedule_maintenance(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.RescheduleMaintenanceRequest, + dict, + ], +) +def test_reschedule_maintenance(request_type, transport: str = "grpc"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4516,10 +5033,10 @@ def test_reschedule_maintenance(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.reschedule_maintenance), - '__call__') as call: + type(client.transport.reschedule_maintenance), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.reschedule_maintenance(request) # Establish that the underlying gRPC stub method was called. @@ -4537,28 +5054,31 @@ def test_reschedule_maintenance_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. 
client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = cloud_redis.RescheduleMaintenanceRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.reschedule_maintenance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.reschedule_maintenance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.RescheduleMaintenanceRequest( - name='name_value', + name="name_value", ) + def test_reschedule_maintenance_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -4573,12 +5093,19 @@ def test_reschedule_maintenance_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.reschedule_maintenance in client._transport._wrapped_methods + assert ( + client._transport.reschedule_maintenance + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.reschedule_maintenance] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.reschedule_maintenance] = ( + mock_rpc + ) request = {} client.reschedule_maintenance(request) @@ -4596,8 +5123,11 @@ def test_reschedule_maintenance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_reschedule_maintenance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_reschedule_maintenance_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -4611,12 +5141,17 @@ async def test_reschedule_maintenance_async_use_cached_wrapped_rpc(transport: st wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.reschedule_maintenance in client._client._transport._wrapped_methods + assert ( + client._client._transport.reschedule_maintenance + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.reschedule_maintenance] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.reschedule_maintenance + ] = mock_rpc request = {} await client.reschedule_maintenance(request) @@ -4635,8 +5170,12 @@ async def test_reschedule_maintenance_async_use_cached_wrapped_rpc(transport: st assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_reschedule_maintenance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.RescheduleMaintenanceRequest): +async def test_reschedule_maintenance_async( + transport: str = "grpc_asyncio", + request_type=cloud_redis.RescheduleMaintenanceRequest, +): client = 
CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -4648,11 +5187,11 @@ async def test_reschedule_maintenance_async(transport: str = 'grpc_asyncio', req # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.reschedule_maintenance), - '__call__') as call: + type(client.transport.reschedule_maintenance), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.reschedule_maintenance(request) @@ -4670,6 +5209,7 @@ async def test_reschedule_maintenance_async(transport: str = 'grpc_asyncio', req async def test_reschedule_maintenance_async_from_dict(): await test_reschedule_maintenance_async(request_type=dict) + def test_reschedule_maintenance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4679,13 +5219,13 @@ def test_reschedule_maintenance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_redis.RescheduleMaintenanceRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.reschedule_maintenance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.reschedule_maintenance(request) # Establish that the underlying gRPC stub method was called. @@ -4696,9 +5236,9 @@ def test_reschedule_maintenance_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -4711,13 +5251,15 @@ async def test_reschedule_maintenance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_redis.RescheduleMaintenanceRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.reschedule_maintenance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.reschedule_maintenance(request) # Establish that the underlying gRPC stub method was called. @@ -4728,9 +5270,9 @@ async def test_reschedule_maintenance_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_reschedule_maintenance_flattened(): @@ -4740,14 +5282,14 @@ def test_reschedule_maintenance_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.reschedule_maintenance), - '__call__') as call: + type(client.transport.reschedule_maintenance), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.reschedule_maintenance( - name='name_value', + name="name_value", reschedule_type=cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, schedule_time=timestamp_pb2.Timestamp(seconds=751), ) @@ -4757,12 +5299,14 @@ def test_reschedule_maintenance_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].reschedule_type mock_val = cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE assert arg == mock_val - assert TimestampRule().to_proto(args[0].schedule_time) == timestamp_pb2.Timestamp(seconds=751) + assert TimestampRule().to_proto( + args[0].schedule_time + ) == timestamp_pb2.Timestamp(seconds=751) def test_reschedule_maintenance_flattened_error(): @@ -4775,11 +5319,12 @@ def test_reschedule_maintenance_flattened_error(): with pytest.raises(ValueError): client.reschedule_maintenance( cloud_redis.RescheduleMaintenanceRequest(), - name='name_value', + name="name_value", reschedule_type=cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, schedule_time=timestamp_pb2.Timestamp(seconds=751), ) + @pytest.mark.asyncio async def test_reschedule_maintenance_flattened_async(): client = CloudRedisAsyncClient( @@ -4788,18 +5333,18 @@ async def test_reschedule_maintenance_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.reschedule_maintenance), - '__call__') as call: + type(client.transport.reschedule_maintenance), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.reschedule_maintenance( - name='name_value', + name="name_value", reschedule_type=cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, schedule_time=timestamp_pb2.Timestamp(seconds=751), ) @@ -4809,12 +5354,15 @@ async def test_reschedule_maintenance_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].reschedule_type mock_val = cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE assert arg == mock_val - assert TimestampRule().to_proto(args[0].schedule_time) == timestamp_pb2.Timestamp(seconds=751) + assert TimestampRule().to_proto( + args[0].schedule_time + ) == timestamp_pb2.Timestamp(seconds=751) + @pytest.mark.asyncio async def test_reschedule_maintenance_flattened_error_async(): @@ -4827,7 +5375,7 @@ async def test_reschedule_maintenance_flattened_error_async(): with pytest.raises(ValueError): await client.reschedule_maintenance( cloud_redis.RescheduleMaintenanceRequest(), - name='name_value', + name="name_value", reschedule_type=cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, schedule_time=timestamp_pb2.Timestamp(seconds=751), ) @@ -4851,7 +5399,9 @@ def test_list_instances_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc request = {} @@ -4867,57 +5417,67 @@ def test_list_instances_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_instances_rest_required_fields(request_type=cloud_redis.ListInstancesRequest): +def test_list_instances_rest_required_fields( + request_type=cloud_redis.ListInstancesRequest, +): transport_class = transports.CloudRedisRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instances._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instances._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = cloud_redis.ListInstancesResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -4928,23 +5488,32 @@ def test_list_instances_rest_required_fields(request_type=cloud_redis.ListInstan return_value = cloud_redis.ListInstancesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_instances(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_instances_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_instances._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) def test_list_instances_rest_flattened(): @@ -4954,16 +5523,16 @@ def test_list_instances_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = cloud_redis.ListInstancesResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -4973,7 +5542,7 @@ def test_list_instances_rest_flattened(): # Convert return value to protobuf type return_value = cloud_redis.ListInstancesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -4983,10 +5552,13 @@ def test_list_instances_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/instances" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/instances" % client.transport._host, + args[1], + ) -def test_list_instances_rest_flattened_error(transport: str = 'rest'): +def test_list_instances_rest_flattened_error(transport: str = "rest"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4997,20 +5569,20 @@ def test_list_instances_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_instances( cloud_redis.ListInstancesRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_instances_rest_pager(transport: str = 'rest'): +def test_list_instances_rest_pager(transport: str = "rest"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the 
method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( cloud_redis.ListInstancesResponse( @@ -5019,17 +5591,17 @@ def test_list_instances_rest_pager(transport: str = 'rest'): cloud_redis.Instance(), cloud_redis.Instance(), ], - next_page_token='abc', + next_page_token="abc", ), cloud_redis.ListInstancesResponse( instances=[], - next_page_token='def', + next_page_token="def", ), cloud_redis.ListInstancesResponse( instances=[ cloud_redis.Instance(), ], - next_page_token='ghi', + next_page_token="ghi", ), cloud_redis.ListInstancesResponse( instances=[ @@ -5045,21 +5617,20 @@ def test_list_instances_rest_pager(transport: str = 'rest'): response = tuple(cloud_redis.ListInstancesResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} pager = client.list_instances(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, cloud_redis.Instance) - for i in results) + assert all(isinstance(i, cloud_redis.Instance) for i in results) pages = list(client.list_instances(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -5081,7 +5652,9 @@ def 
test_get_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc request = {} @@ -5104,48 +5677,51 @@ def test_get_instance_rest_required_fields(request_type=cloud_redis.GetInstanceR request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - 
transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = cloud_redis.Instance() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -5156,23 +5732,24 @@ def test_get_instance_rest_required_fields(request_type=cloud_redis.GetInstanceR return_value = cloud_redis.Instance.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_instance(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_get_instance_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = 
transport.get_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_get_instance_rest_flattened(): @@ -5182,16 +5759,18 @@ def test_get_instance_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = cloud_redis.Instance() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -5201,7 +5780,7 @@ def test_get_instance_rest_flattened(): # Convert return value to protobuf type return_value = cloud_redis.Instance.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -5211,10 +5790,13 @@ def test_get_instance_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, + args[1], + ) -def test_get_instance_rest_flattened_error(transport: str = 'rest'): +def test_get_instance_rest_flattened_error(transport: str = "rest"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5225,7 +5807,7 @@ def test_get_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.get_instance( cloud_redis.GetInstanceRequest(), - name='name_value', + name="name_value", ) @@ -5243,12 +5825,19 @@ def test_get_instance_auth_string_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_instance_auth_string in client._transport._wrapped_methods + assert ( + client._transport.get_instance_auth_string + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_instance_auth_string] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.get_instance_auth_string + ] = mock_rpc request = {} client.get_instance_auth_string(request) @@ -5263,55 +5852,60 @@ def test_get_instance_auth_string_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_get_instance_auth_string_rest_required_fields(request_type=cloud_redis.GetInstanceAuthStringRequest): +def test_get_instance_auth_string_rest_required_fields( + request_type=cloud_redis.GetInstanceAuthStringRequest, +): transport_class = transports.CloudRedisRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance_auth_string._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance_auth_string._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance_auth_string._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance_auth_string._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == 
"name_value" client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = cloud_redis.InstanceAuthString() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -5322,23 +5916,24 @@ def test_get_instance_auth_string_rest_required_fields(request_type=cloud_redis. 
return_value = cloud_redis.InstanceAuthString.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_instance_auth_string(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_get_instance_auth_string_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.get_instance_auth_string._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_get_instance_auth_string_rest_flattened(): @@ -5348,16 +5943,18 @@ def test_get_instance_auth_string_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = cloud_redis.InstanceAuthString() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -5367,7 +5964,7 @@ def test_get_instance_auth_string_rest_flattened(): # Convert return value to protobuf type return_value = cloud_redis.InstanceAuthString.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -5377,10 +5974,14 @@ def test_get_instance_auth_string_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}/authString" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*}/authString" + % client.transport._host, + args[1], + ) -def test_get_instance_auth_string_rest_flattened_error(transport: str = 'rest'): +def test_get_instance_auth_string_rest_flattened_error(transport: str = "rest"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5391,7 +5992,7 @@ def test_get_instance_auth_string_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.get_instance_auth_string( cloud_redis.GetInstanceAuthStringRequest(), - name='name_value', + name="name_value", ) @@ -5413,7 +6014,9 @@ def test_create_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - 
mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc request = {} @@ -5433,7 +6036,9 @@ def test_create_instance_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_create_instance_rest_required_fields(request_type=cloud_redis.CreateInstanceRequest): +def test_create_instance_rest_required_fields( + request_type=cloud_redis.CreateInstanceRequest, +): transport_class = transports.CloudRedisRestTransport request_init = {} @@ -5441,65 +6046,68 @@ def test_create_instance_rest_required_fields(request_type=cloud_redis.CreateIns request_init["instance_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "instanceId" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present assert "instanceId" in jsonified_request assert jsonified_request["instanceId"] == request_init["instance_id"] - jsonified_request["parent"] = 'parent_value' - jsonified_request["instanceId"] = 'instance_id_value' + jsonified_request["parent"] = "parent_value" + jsonified_request["instanceId"] = "instance_id_value" - unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_instance._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("instance_id", )) + assert not set(unset_fields) - set(("instance_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" assert "instanceId" in jsonified_request - assert jsonified_request["instanceId"] == 'instance_id_value' + assert jsonified_request["instanceId"] == "instance_id_value" client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -5511,15 +6119,26 @@ def test_create_instance_rest_required_fields(request_type=cloud_redis.CreateIns "", ), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_create_instance_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.create_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(("instanceId", )) & set(("parent", "instanceId", "instance", ))) + assert set(unset_fields) == ( + set(("instanceId",)) + & set( + ( + "parent", + "instanceId", + "instance", + ) + ) + ) def test_create_instance_rest_flattened(): @@ -5529,18 +6148,18 @@ def test_create_instance_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', - instance_id='instance_id_value', - instance=cloud_redis.Instance(name='name_value'), + parent="parent_value", + instance_id="instance_id_value", + instance=cloud_redis.Instance(name="name_value"), ) mock_args.update(sample_request) @@ -5548,7 +6167,7 @@ def test_create_instance_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -5558,10 +6177,13 @@ def test_create_instance_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/instances" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/instances" % client.transport._host, + args[1], + ) -def test_create_instance_rest_flattened_error(transport: str = 'rest'): +def test_create_instance_rest_flattened_error(transport: str = "rest"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5572,9 +6194,9 @@ def test_create_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.create_instance( cloud_redis.CreateInstanceRequest(), - parent='parent_value', - instance_id='instance_id_value', - instance=cloud_redis.Instance(name='name_value'), + parent="parent_value", + instance_id="instance_id_value", + instance=cloud_redis.Instance(name="name_value"), ) @@ -5596,7 +6218,9 @@ def test_update_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc request = {} @@ -5616,77 +6240,91 @@ def test_update_instance_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_update_instance_rest_required_fields(request_type=cloud_redis.UpdateInstanceRequest): +def test_update_instance_rest_required_fields( + request_type=cloud_redis.UpdateInstanceRequest, +): transport_class = transports.CloudRedisRestTransport request_init = {} request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_instance._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask", )) + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_instance(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_update_instance_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.update_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("updateMask", "instance", ))) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "updateMask", + "instance", + ) + ) + ) def test_update_instance_rest_flattened(): @@ -5696,17 +6334,19 @@ def test_update_instance_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + sample_request = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } # get truthy value for each flattened field mock_args = dict( - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - instance=cloud_redis.Instance(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + instance=cloud_redis.Instance(name="name_value"), ) mock_args.update(sample_request) @@ -5714,7 +6354,7 @@ def test_update_instance_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -5724,10 +6364,14 @@ def test_update_instance_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{instance.name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{instance.name=projects/*/locations/*/instances/*}" + % client.transport._host, + args[1], + ) -def test_update_instance_rest_flattened_error(transport: str = 'rest'): +def test_update_instance_rest_flattened_error(transport: str = "rest"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5738,8 +6382,8 @@ def test_update_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.update_instance( cloud_redis.UpdateInstanceRequest(), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - instance=cloud_redis.Instance(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + instance=cloud_redis.Instance(name="name_value"), ) @@ -5761,8 +6405,12 @@ def test_upgrade_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.upgrade_instance] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.upgrade_instance] = ( + mock_rpc + ) request = {} client.upgrade_instance(request) @@ -5781,7 +6429,9 @@ def test_upgrade_instance_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_upgrade_instance_rest_required_fields(request_type=cloud_redis.UpgradeInstanceRequest): +def test_upgrade_instance_rest_required_fields( + request_type=cloud_redis.UpgradeInstanceRequest, +): transport_class = transports.CloudRedisRestTransport request_init = {} @@ -5789,76 +6439,88 @@ def test_upgrade_instance_rest_required_fields(request_type=cloud_redis.UpgradeI request_init["redis_version"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).upgrade_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).upgrade_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' - jsonified_request["redisVersion"] = 'redis_version_value' + jsonified_request["name"] = "name_value" + jsonified_request["redisVersion"] = "redis_version_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).upgrade_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).upgrade_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default 
values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" assert "redisVersion" in jsonified_request - assert jsonified_request["redisVersion"] == 'redis_version_value' + assert jsonified_request["redisVersion"] == "redis_version_value" client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.upgrade_instance(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_upgrade_instance_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.upgrade_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "redisVersion", ))) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "redisVersion", + ) + ) + ) def test_upgrade_instance_rest_flattened(): @@ -5868,17 +6530,19 @@ def test_upgrade_instance_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', - redis_version='redis_version_value', + name="name_value", + redis_version="redis_version_value", ) mock_args.update(sample_request) @@ -5886,7 +6550,7 @@ def test_upgrade_instance_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -5896,10 +6560,14 @@ def test_upgrade_instance_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}:upgrade" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*}:upgrade" + % client.transport._host, + args[1], + ) -def test_upgrade_instance_rest_flattened_error(transport: str = 'rest'): +def test_upgrade_instance_rest_flattened_error(transport: str = "rest"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5910,8 +6578,8 @@ def test_upgrade_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.upgrade_instance( cloud_redis.UpgradeInstanceRequest(), - name='name_value', - redis_version='redis_version_value', + name="name_value", + redis_version="redis_version_value", ) @@ -5933,7 +6601,9 @@ def test_import_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.import_instance] = mock_rpc request = {} @@ -5953,80 +6623,94 @@ def test_import_instance_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_import_instance_rest_required_fields(request_type=cloud_redis.ImportInstanceRequest): +def test_import_instance_rest_required_fields( + request_type=cloud_redis.ImportInstanceRequest, +): transport_class = transports.CloudRedisRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).import_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).import_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).import_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).import_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = 
request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.import_instance(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_import_instance_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) 
unset_fields = transport.import_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "inputConfig", ))) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "inputConfig", + ) + ) + ) def test_import_instance_rest_flattened(): @@ -6036,17 +6720,21 @@ def test_import_instance_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', - input_config=cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri='uri_value')), + name="name_value", + input_config=cloud_redis.InputConfig( + gcs_source=cloud_redis.GcsSource(uri="uri_value") + ), ) mock_args.update(sample_request) @@ -6054,7 +6742,7 @@ def test_import_instance_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -6064,10 +6752,14 @@ def test_import_instance_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}:import" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*}:import" + % client.transport._host, + args[1], + ) -def test_import_instance_rest_flattened_error(transport: str = 'rest'): +def test_import_instance_rest_flattened_error(transport: str = "rest"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6078,8 +6770,10 @@ def test_import_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.import_instance( cloud_redis.ImportInstanceRequest(), - name='name_value', - input_config=cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri='uri_value')), + name="name_value", + input_config=cloud_redis.InputConfig( + gcs_source=cloud_redis.GcsSource(uri="uri_value") + ), ) @@ -6101,7 +6795,9 @@ def test_export_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.export_instance] = mock_rpc request = {} @@ -6121,80 +6817,94 @@ def test_export_instance_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_export_instance_rest_required_fields(request_type=cloud_redis.ExportInstanceRequest): +def test_export_instance_rest_required_fields( + request_type=cloud_redis.ExportInstanceRequest, +): transport_class = transports.CloudRedisRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = 
request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.export_instance(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_export_instance_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) 
unset_fields = transport.export_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "outputConfig", ))) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "outputConfig", + ) + ) + ) def test_export_instance_rest_flattened(): @@ -6204,17 +6914,21 @@ def test_export_instance_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', - output_config=cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri='uri_value')), + name="name_value", + output_config=cloud_redis.OutputConfig( + gcs_destination=cloud_redis.GcsDestination(uri="uri_value") + ), ) mock_args.update(sample_request) @@ -6222,7 +6936,7 @@ def test_export_instance_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -6232,10 +6946,14 @@ def test_export_instance_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}:export" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*}:export" + % client.transport._host, + args[1], + ) -def test_export_instance_rest_flattened_error(transport: str = 'rest'): +def test_export_instance_rest_flattened_error(transport: str = "rest"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6246,8 +6964,10 @@ def test_export_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.export_instance( cloud_redis.ExportInstanceRequest(), - name='name_value', - output_config=cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri='uri_value')), + name="name_value", + output_config=cloud_redis.OutputConfig( + gcs_destination=cloud_redis.GcsDestination(uri="uri_value") + ), ) @@ -6269,8 +6989,12 @@ def test_failover_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.failover_instance] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.failover_instance] = ( + mock_rpc + ) request = {} client.failover_instance(request) @@ -6289,80 +7013,86 @@ def test_failover_instance_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_failover_instance_rest_required_fields(request_type=cloud_redis.FailoverInstanceRequest): +def test_failover_instance_rest_required_fields( + request_type=cloud_redis.FailoverInstanceRequest, +): transport_class = transports.CloudRedisRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).failover_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).failover_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).failover_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).failover_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = CloudRedisClient( 
credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.failover_instance(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_failover_instance_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + 
transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.failover_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_failover_instance_rest_flattened(): @@ -6372,16 +7102,18 @@ def test_failover_instance_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", data_protection_mode=cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS, ) mock_args.update(sample_request) @@ -6390,7 +7122,7 @@ def test_failover_instance_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -6400,10 +7132,14 @@ def test_failover_instance_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}:failover" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*}:failover" + % client.transport._host, + args[1], + ) -def test_failover_instance_rest_flattened_error(transport: str = 'rest'): +def test_failover_instance_rest_flattened_error(transport: str = "rest"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6414,7 +7150,7 @@ def test_failover_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.failover_instance( cloud_redis.FailoverInstanceRequest(), - name='name_value', + name="name_value", data_protection_mode=cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS, ) @@ -6437,7 +7173,9 @@ def test_delete_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.delete_instance] = mock_rpc request = {} @@ -6457,55 +7195,60 @@ def test_delete_instance_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_delete_instance_rest_required_fields(request_type=cloud_redis.DeleteInstanceRequest): +def test_delete_instance_rest_required_fields( + request_type=cloud_redis.DeleteInstanceRequest, +): transport_class = transports.CloudRedisRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = 
request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -6513,23 +7256,24 @@ def test_delete_instance_rest_required_fields(request_type=cloud_redis.DeleteIns response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_instance(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_delete_instance_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) 
unset_fields = transport.delete_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_delete_instance_rest_flattened(): @@ -6539,16 +7283,18 @@ def test_delete_instance_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -6556,7 +7302,7 @@ def test_delete_instance_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -6566,10 +7312,13 @@ def test_delete_instance_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, + args[1], + ) -def test_delete_instance_rest_flattened_error(transport: str = 'rest'): +def test_delete_instance_rest_flattened_error(transport: str = "rest"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6580,7 +7329,7 @@ def test_delete_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.delete_instance( cloud_redis.DeleteInstanceRequest(), - name='name_value', + name="name_value", ) @@ -6598,12 +7347,19 @@ def test_reschedule_maintenance_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.reschedule_maintenance in client._transport._wrapped_methods + assert ( + client._transport.reschedule_maintenance + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.reschedule_maintenance] = mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.reschedule_maintenance] = ( + mock_rpc + ) request = {} client.reschedule_maintenance(request) @@ -6622,80 +7378,94 @@ def test_reschedule_maintenance_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_reschedule_maintenance_rest_required_fields(request_type=cloud_redis.RescheduleMaintenanceRequest): +def test_reschedule_maintenance_rest_required_fields( + request_type=cloud_redis.RescheduleMaintenanceRequest, +): transport_class = transports.CloudRedisRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reschedule_maintenance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).reschedule_maintenance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reschedule_maintenance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).reschedule_maintenance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = 
CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.reschedule_maintenance(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_reschedule_maintenance_rest_unset_required_fields(): - transport = 
transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.reschedule_maintenance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "rescheduleType", ))) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "rescheduleType", + ) + ) + ) def test_reschedule_maintenance_rest_flattened(): @@ -6705,16 +7475,18 @@ def test_reschedule_maintenance_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", reschedule_type=cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, schedule_time=timestamp_pb2.Timestamp(seconds=751), ) @@ -6724,7 +7496,7 @@ def test_reschedule_maintenance_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -6734,10 +7506,14 @@ def test_reschedule_maintenance_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}:rescheduleMaintenance" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*}:rescheduleMaintenance" + % client.transport._host, + args[1], + ) -def test_reschedule_maintenance_rest_flattened_error(transport: str = 'rest'): +def test_reschedule_maintenance_rest_flattened_error(transport: str = "rest"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6748,7 +7524,7 @@ def test_reschedule_maintenance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.reschedule_maintenance( cloud_redis.RescheduleMaintenanceRequest(), - name='name_value', + name="name_value", reschedule_type=cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, schedule_time=timestamp_pb2.Timestamp(seconds=751), ) @@ -6792,8 +7568,7 @@ def test_credentials_transport_error(): options.api_key = "api_key" with pytest.raises(ValueError): client = CloudRedisClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() + client_options=options, credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. @@ -6815,6 +7590,7 @@ def test_transport_instance(): client = CloudRedisClient(transport=transport) assert client.transport is transport + def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.CloudRedisGrpcTransport( @@ -6829,18 +7605,23 @@ def test_transport_get_channel(): channel = transport.grpc_channel assert channel -@pytest.mark.parametrize("transport_class", [ - transports.CloudRedisGrpcTransport, - transports.CloudRedisGrpcAsyncIOTransport, - transports.CloudRedisRestTransport, -]) + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudRedisGrpcTransport, + transports.CloudRedisGrpcAsyncIOTransport, + transports.CloudRedisRestTransport, + ], +) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() + def test_transport_kind_grpc(): transport = CloudRedisClient.get_transport_class("grpc")( credentials=ga_credentials.AnonymousCredentials() @@ -6850,8 +7631,7 @@ def test_transport_kind_grpc(): def test_initialize_client_w_grpc(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" ) assert client is not None @@ -6865,9 +7645,7 @@ def test_list_instances_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: call.return_value = cloud_redis.ListInstancesResponse() client.list_instances(request=None) @@ -6888,9 +7666,7 @@ def test_get_instance_empty_call_grpc(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: call.return_value = cloud_redis.Instance() client.get_instance(request=None) @@ -6912,8 +7688,8 @@ def test_get_instance_auth_string_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_instance_auth_string), - '__call__') as call: + type(client.transport.get_instance_auth_string), "__call__" + ) as call: call.return_value = cloud_redis.InstanceAuthString() client.get_instance_auth_string(request=None) @@ -6934,10 +7710,8 @@ def test_create_instance_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.create_instance(request=None) # Establish that the underlying stub method was called. @@ -6957,10 +7731,8 @@ def test_update_instance_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.update_instance(request=None) # Establish that the underlying stub method was called. @@ -6980,10 +7752,8 @@ def test_upgrade_instance_empty_call_grpc(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.upgrade_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.upgrade_instance(request=None) # Establish that the underlying stub method was called. @@ -7003,10 +7773,8 @@ def test_import_instance_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.import_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.import_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.import_instance(request=None) # Establish that the underlying stub method was called. @@ -7026,10 +7794,8 @@ def test_export_instance_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.export_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.export_instance(request=None) # Establish that the underlying stub method was called. @@ -7050,9 +7816,9 @@ def test_failover_instance_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.failover_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + type(client.transport.failover_instance), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.failover_instance(request=None) # Establish that the underlying stub method was called. 
@@ -7072,10 +7838,8 @@ def test_delete_instance_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.delete_instance(request=None) # Establish that the underlying stub method was called. @@ -7096,9 +7860,9 @@ def test_reschedule_maintenance_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.reschedule_maintenance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + type(client.transport.reschedule_maintenance), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.reschedule_maintenance(request=None) # Establish that the underlying stub method was called. @@ -7118,8 +7882,7 @@ def test_transport_kind_grpc_asyncio(): def test_initialize_client_w_grpc_asyncio(): client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" + credentials=async_anonymous_credentials(), transport="grpc_asyncio" ) assert client is not None @@ -7134,14 +7897,14 @@ async def test_list_instances_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_redis.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) await client.list_instances(request=None) # Establish that the underlying stub method was called. @@ -7162,39 +7925,41 @@ async def test_get_instance_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.Instance( - name='name_value', - display_name='display_name_value', - location_id='location_id_value', - alternative_location_id='alternative_location_id_value', - redis_version='redis_version_value', - reserved_ip_range='reserved_ip_range_value', - secondary_ip_range='secondary_ip_range_value', - host='host_value', - port=453, - current_location_id='current_location_id_value', - state=cloud_redis.Instance.State.CREATING, - status_message='status_message_value', - tier=cloud_redis.Instance.Tier.BASIC, - memory_size_gb=1499, - authorized_network='authorized_network_value', - persistence_iam_identity='persistence_iam_identity_value', - connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, - auth_enabled=True, - transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, - replica_count=1384, - read_endpoint='read_endpoint_value', - read_endpoint_port=1920, - read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, - customer_managed_key='customer_managed_key_value', - 
suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], - maintenance_version='maintenance_version_value', - available_maintenance_versions=['available_maintenance_versions_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_redis.Instance( + name="name_value", + display_name="display_name_value", + location_id="location_id_value", + alternative_location_id="alternative_location_id_value", + redis_version="redis_version_value", + reserved_ip_range="reserved_ip_range_value", + secondary_ip_range="secondary_ip_range_value", + host="host_value", + port=453, + current_location_id="current_location_id_value", + state=cloud_redis.Instance.State.CREATING, + status_message="status_message_value", + tier=cloud_redis.Instance.Tier.BASIC, + memory_size_gb=1499, + authorized_network="authorized_network_value", + persistence_iam_identity="persistence_iam_identity_value", + connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, + auth_enabled=True, + transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, + replica_count=1384, + read_endpoint="read_endpoint_value", + read_endpoint_port=1920, + read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, + customer_managed_key="customer_managed_key_value", + suspension_reasons=[ + cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE + ], + maintenance_version="maintenance_version_value", + available_maintenance_versions=["available_maintenance_versions_value"], + ) + ) await client.get_instance(request=None) # Establish that the underlying stub method was called. @@ -7216,12 +7981,14 @@ async def test_get_instance_auth_string_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.get_instance_auth_string), - '__call__') as call: + type(client.transport.get_instance_auth_string), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.InstanceAuthString( - auth_string='auth_string_value', - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_redis.InstanceAuthString( + auth_string="auth_string_value", + ) + ) await client.get_instance_auth_string(request=None) # Establish that the underlying stub method was called. @@ -7242,12 +8009,10 @@ async def test_create_instance_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.create_instance(request=None) @@ -7269,12 +8034,10 @@ async def test_update_instance_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.update_instance(request=None) @@ -7296,12 +8059,10 @@ async def test_upgrade_instance_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.upgrade_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.upgrade_instance(request=None) @@ -7323,12 +8084,10 @@ async def test_import_instance_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.import_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.import_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.import_instance(request=None) @@ -7350,12 +8109,10 @@ async def test_export_instance_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.export_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.export_instance(request=None) @@ -7378,11 +8135,11 @@ async def test_failover_instance_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.failover_instance), - '__call__') as call: + type(client.transport.failover_instance), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.failover_instance(request=None) @@ -7404,12 +8161,10 @@ async def test_delete_instance_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.delete_instance(request=None) @@ -7432,11 +8187,11 @@ async def test_reschedule_maintenance_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.reschedule_maintenance), - '__call__') as call: + type(client.transport.reschedule_maintenance), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.reschedule_maintenance(request=None) @@ -7457,18 +8212,20 @@ def test_transport_kind_rest(): def test_list_instances_rest_bad_request(request_type=cloud_redis.ListInstancesRequest): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -7477,26 +8234,28 @@ def test_list_instances_rest_bad_request(request_type=cloud_redis.ListInstancesR client.list_instances(request) -@pytest.mark.parametrize("request_type", [ - cloud_redis.ListInstancesRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.ListInstancesRequest, + dict, + ], +) def test_list_instances_rest_call_success(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = cloud_redis.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -7506,34 +8265,46 @@ def test_list_instances_rest_call_success(request_type): # Convert return value to protobuf type return_value = cloud_redis.ListInstancesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_instances(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_instances_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.CloudRedisRestInterceptor(), + ) client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_list_instances") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_list_instances_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CloudRedisRestInterceptor, 
"pre_list_instances") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.CloudRedisRestInterceptor, "post_list_instances" + ) as post, + mock.patch.object( + transports.CloudRedisRestInterceptor, "post_list_instances_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.CloudRedisRestInterceptor, "pre_list_instances" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = cloud_redis.ListInstancesRequest.pb(cloud_redis.ListInstancesRequest()) + pb_message = cloud_redis.ListInstancesRequest.pb( + cloud_redis.ListInstancesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -7544,11 +8315,13 @@ def test_list_instances_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = cloud_redis.ListInstancesResponse.to_json(cloud_redis.ListInstancesResponse()) + return_value = cloud_redis.ListInstancesResponse.to_json( + cloud_redis.ListInstancesResponse() + ) req.return_value.content = return_value request = cloud_redis.ListInstancesRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -7556,7 +8329,13 @@ def test_list_instances_rest_interceptors(null_interceptor): post.return_value = cloud_redis.ListInstancesResponse() post_with_metadata.return_value = cloud_redis.ListInstancesResponse(), metadata - client.list_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_instances( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -7565,18 +8344,20 @@ def test_list_instances_rest_interceptors(null_interceptor): def 
test_get_instance_rest_bad_request(request_type=cloud_redis.GetInstanceRequest): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -7585,51 +8366,55 @@ def test_get_instance_rest_bad_request(request_type=cloud_redis.GetInstanceReque client.get_instance(request) -@pytest.mark.parametrize("request_type", [ - cloud_redis.GetInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.GetInstanceRequest, + dict, + ], +) def test_get_instance_rest_call_success(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = cloud_redis.Instance( - name='name_value', - display_name='display_name_value', - location_id='location_id_value', - alternative_location_id='alternative_location_id_value', - redis_version='redis_version_value', - reserved_ip_range='reserved_ip_range_value', - secondary_ip_range='secondary_ip_range_value', - host='host_value', - port=453, - current_location_id='current_location_id_value', - state=cloud_redis.Instance.State.CREATING, - status_message='status_message_value', - tier=cloud_redis.Instance.Tier.BASIC, - memory_size_gb=1499, - authorized_network='authorized_network_value', - persistence_iam_identity='persistence_iam_identity_value', - connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, - auth_enabled=True, - transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, - replica_count=1384, - read_endpoint='read_endpoint_value', - read_endpoint_port=1920, - read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, - customer_managed_key='customer_managed_key_value', - suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], - maintenance_version='maintenance_version_value', - available_maintenance_versions=['available_maintenance_versions_value'], + name="name_value", + display_name="display_name_value", + location_id="location_id_value", + alternative_location_id="alternative_location_id_value", + redis_version="redis_version_value", + reserved_ip_range="reserved_ip_range_value", + secondary_ip_range="secondary_ip_range_value", + host="host_value", + port=453, + current_location_id="current_location_id_value", + state=cloud_redis.Instance.State.CREATING, + status_message="status_message_value", + tier=cloud_redis.Instance.Tier.BASIC, + 
memory_size_gb=1499, + authorized_network="authorized_network_value", + persistence_iam_identity="persistence_iam_identity_value", + connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, + auth_enabled=True, + transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, + replica_count=1384, + read_endpoint="read_endpoint_value", + read_endpoint_port=1920, + read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, + customer_managed_key="customer_managed_key_value", + suspension_reasons=[ + cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE + ], + maintenance_version="maintenance_version_value", + available_maintenance_versions=["available_maintenance_versions_value"], ) # Wrap the value into a proper Response obj @@ -7639,55 +8424,75 @@ def test_get_instance_rest_call_success(request_type): # Convert return value to protobuf type return_value = cloud_redis.Instance.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_instance(request) # Establish that the response is the type that we expect. 
assert isinstance(response, cloud_redis.Instance) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.location_id == 'location_id_value' - assert response.alternative_location_id == 'alternative_location_id_value' - assert response.redis_version == 'redis_version_value' - assert response.reserved_ip_range == 'reserved_ip_range_value' - assert response.secondary_ip_range == 'secondary_ip_range_value' - assert response.host == 'host_value' + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.location_id == "location_id_value" + assert response.alternative_location_id == "alternative_location_id_value" + assert response.redis_version == "redis_version_value" + assert response.reserved_ip_range == "reserved_ip_range_value" + assert response.secondary_ip_range == "secondary_ip_range_value" + assert response.host == "host_value" assert response.port == 453 - assert response.current_location_id == 'current_location_id_value' + assert response.current_location_id == "current_location_id_value" assert response.state == cloud_redis.Instance.State.CREATING - assert response.status_message == 'status_message_value' + assert response.status_message == "status_message_value" assert response.tier == cloud_redis.Instance.Tier.BASIC assert response.memory_size_gb == 1499 - assert response.authorized_network == 'authorized_network_value' - assert response.persistence_iam_identity == 'persistence_iam_identity_value' + assert response.authorized_network == "authorized_network_value" + assert response.persistence_iam_identity == "persistence_iam_identity_value" assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING assert response.auth_enabled is True - assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + assert ( + response.transit_encryption_mode + == 
cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + ) assert response.replica_count == 1384 - assert response.read_endpoint == 'read_endpoint_value' + assert response.read_endpoint == "read_endpoint_value" assert response.read_endpoint_port == 1920 - assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED - assert response.customer_managed_key == 'customer_managed_key_value' - assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE] - assert response.maintenance_version == 'maintenance_version_value' - assert response.available_maintenance_versions == ['available_maintenance_versions_value'] + assert ( + response.read_replicas_mode + == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED + ) + assert response.customer_managed_key == "customer_managed_key_value" + assert response.suspension_reasons == [ + cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE + ] + assert response.maintenance_version == "maintenance_version_value" + assert response.available_maintenance_versions == [ + "available_maintenance_versions_value" + ] @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_instance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.CloudRedisRestInterceptor(), + ) client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_get_instance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_get_instance_with_metadata") as post_with_metadata, \ - 
mock.patch.object(transports.CloudRedisRestInterceptor, "pre_get_instance") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.CloudRedisRestInterceptor, "post_get_instance" + ) as post, + mock.patch.object( + transports.CloudRedisRestInterceptor, "post_get_instance_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.CloudRedisRestInterceptor, "pre_get_instance" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -7706,7 +8511,7 @@ def test_get_instance_rest_interceptors(null_interceptor): req.return_value.content = return_value request = cloud_redis.GetInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -7714,27 +8519,37 @@ def test_get_instance_rest_interceptors(null_interceptor): post.return_value = cloud_redis.Instance() post_with_metadata.return_value = cloud_redis.Instance(), metadata - client.get_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_get_instance_auth_string_rest_bad_request(request_type=cloud_redis.GetInstanceAuthStringRequest): +def test_get_instance_auth_string_rest_bad_request( + request_type=cloud_redis.GetInstanceAuthStringRequest, +): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request 
call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -7743,25 +8558,27 @@ def test_get_instance_auth_string_rest_bad_request(request_type=cloud_redis.GetI client.get_instance_auth_string(request) -@pytest.mark.parametrize("request_type", [ - cloud_redis.GetInstanceAuthStringRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.GetInstanceAuthStringRequest, + dict, + ], +) def test_get_instance_auth_string_rest_call_success(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = cloud_redis.InstanceAuthString( - auth_string='auth_string_value', + auth_string="auth_string_value", ) # Wrap the value into a proper Response obj @@ -7771,33 +8588,46 @@ def test_get_instance_auth_string_rest_call_success(request_type): # Convert return value to protobuf type return_value = cloud_redis.InstanceAuthString.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_instance_auth_string(request) # Establish that the response is the type that we expect. assert isinstance(response, cloud_redis.InstanceAuthString) - assert response.auth_string == 'auth_string_value' + assert response.auth_string == "auth_string_value" @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_instance_auth_string_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.CloudRedisRestInterceptor(), + ) client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_get_instance_auth_string") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_get_instance_auth_string_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_get_instance_auth_string") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + 
mock.patch.object( + transports.CloudRedisRestInterceptor, "post_get_instance_auth_string" + ) as post, + mock.patch.object( + transports.CloudRedisRestInterceptor, + "post_get_instance_auth_string_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.CloudRedisRestInterceptor, "pre_get_instance_auth_string" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = cloud_redis.GetInstanceAuthStringRequest.pb(cloud_redis.GetInstanceAuthStringRequest()) + pb_message = cloud_redis.GetInstanceAuthStringRequest.pb( + cloud_redis.GetInstanceAuthStringRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -7808,11 +8638,13 @@ def test_get_instance_auth_string_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = cloud_redis.InstanceAuthString.to_json(cloud_redis.InstanceAuthString()) + return_value = cloud_redis.InstanceAuthString.to_json( + cloud_redis.InstanceAuthString() + ) req.return_value.content = return_value request = cloud_redis.GetInstanceAuthStringRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -7820,27 +8652,37 @@ def test_get_instance_auth_string_rest_interceptors(null_interceptor): post.return_value = cloud_redis.InstanceAuthString() post_with_metadata.return_value = cloud_redis.InstanceAuthString(), metadata - client.get_instance_auth_string(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_instance_auth_string( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_create_instance_rest_bad_request(request_type=cloud_redis.CreateInstanceRequest): +def test_create_instance_rest_bad_request( + 
request_type=cloud_redis.CreateInstanceRequest, +): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -7849,19 +8691,94 @@ def test_create_instance_rest_bad_request(request_type=cloud_redis.CreateInstanc client.create_instance(request) -@pytest.mark.parametrize("request_type", [ - cloud_redis.CreateInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.CreateInstanceRequest, + dict, + ], +) def test_create_instance_rest_call_success(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["instance"] = {'name': 'name_value', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 
'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["instance"] = { + "name": "name_value", + "display_name": "display_name_value", + "labels": {}, + "location_id": "location_id_value", + "alternative_location_id": "alternative_location_id_value", + "redis_version": "redis_version_value", + "reserved_ip_range": "reserved_ip_range_value", + "secondary_ip_range": "secondary_ip_range_value", + "host": "host_value", + "port": 453, + "current_location_id": 
"current_location_id_value", + "create_time": {"seconds": 751, "nanos": 543}, + "state": 1, + "status_message": "status_message_value", + "redis_configs": {}, + "tier": 1, + "memory_size_gb": 1499, + "authorized_network": "authorized_network_value", + "persistence_iam_identity": "persistence_iam_identity_value", + "connect_mode": 1, + "auth_enabled": True, + "server_ca_certs": [ + { + "serial_number": "serial_number_value", + "cert": "cert_value", + "create_time": {}, + "expire_time": {}, + "sha1_fingerprint": "sha1_fingerprint_value", + } + ], + "transit_encryption_mode": 1, + "maintenance_policy": { + "create_time": {}, + "update_time": {}, + "description": "description_value", + "weekly_maintenance_window": [ + { + "day": 1, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "duration": {"seconds": 751, "nanos": 543}, + } + ], + }, + "maintenance_schedule": { + "start_time": {}, + "end_time": {}, + "can_reschedule": True, + "schedule_deadline_time": {}, + }, + "replica_count": 1384, + "nodes": [{"id": "id_value", "zone": "zone_value"}], + "read_endpoint": "read_endpoint_value", + "read_endpoint_port": 1920, + "read_replicas_mode": 1, + "customer_managed_key": "customer_managed_key_value", + "persistence_config": { + "persistence_mode": 1, + "rdb_snapshot_period": 3, + "rdb_next_snapshot_time": {}, + "rdb_snapshot_start_time": {}, + }, + "suspension_reasons": [1], + "maintenance_version": "maintenance_version_value", + "available_maintenance_versions": [ + "available_maintenance_versions_value1", + "available_maintenance_versions_value2", + ], + } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 @@ -7881,7 +8798,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -7895,7 +8812,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER + for field, value in request_init["instance"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -7910,12 +8827,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -7928,15 +8849,15 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_instance(request) @@ -7949,20 +8870,32 @@ def get_message_fields(field): def test_create_instance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.CloudRedisRestInterceptor(), + ) client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_create_instance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_create_instance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_create_instance") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + 
transports.CloudRedisRestInterceptor, "post_create_instance" + ) as post, + mock.patch.object( + transports.CloudRedisRestInterceptor, "post_create_instance_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.CloudRedisRestInterceptor, "pre_create_instance" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = cloud_redis.CreateInstanceRequest.pb(cloud_redis.CreateInstanceRequest()) + pb_message = cloud_redis.CreateInstanceRequest.pb( + cloud_redis.CreateInstanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -7977,7 +8910,7 @@ def test_create_instance_rest_interceptors(null_interceptor): req.return_value.content = return_value request = cloud_redis.CreateInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -7985,27 +8918,39 @@ def test_create_instance_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.create_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.create_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_update_instance_rest_bad_request(request_type=cloud_redis.UpdateInstanceRequest): +def test_update_instance_rest_bad_request( + request_type=cloud_redis.UpdateInstanceRequest, +): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + request_init = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } request = 
request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -8014,19 +8959,96 @@ def test_update_instance_rest_bad_request(request_type=cloud_redis.UpdateInstanc client.update_instance(request) -@pytest.mark.parametrize("request_type", [ - cloud_redis.UpdateInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.UpdateInstanceRequest, + dict, + ], +) def test_update_instance_rest_call_success(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} - request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 
'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} + request_init = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } + request_init["instance"] = { + "name": "projects/sample1/locations/sample2/instances/sample3", + "display_name": "display_name_value", + "labels": {}, + "location_id": "location_id_value", + "alternative_location_id": "alternative_location_id_value", + "redis_version": "redis_version_value", + "reserved_ip_range": "reserved_ip_range_value", + "secondary_ip_range": "secondary_ip_range_value", + "host": "host_value", + "port": 453, + "current_location_id": "current_location_id_value", + "create_time": {"seconds": 751, "nanos": 543}, + "state": 1, + "status_message": "status_message_value", + "redis_configs": {}, + "tier": 1, + "memory_size_gb": 1499, + "authorized_network": "authorized_network_value", + 
"persistence_iam_identity": "persistence_iam_identity_value", + "connect_mode": 1, + "auth_enabled": True, + "server_ca_certs": [ + { + "serial_number": "serial_number_value", + "cert": "cert_value", + "create_time": {}, + "expire_time": {}, + "sha1_fingerprint": "sha1_fingerprint_value", + } + ], + "transit_encryption_mode": 1, + "maintenance_policy": { + "create_time": {}, + "update_time": {}, + "description": "description_value", + "weekly_maintenance_window": [ + { + "day": 1, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "duration": {"seconds": 751, "nanos": 543}, + } + ], + }, + "maintenance_schedule": { + "start_time": {}, + "end_time": {}, + "can_reschedule": True, + "schedule_deadline_time": {}, + }, + "replica_count": 1384, + "nodes": [{"id": "id_value", "zone": "zone_value"}], + "read_endpoint": "read_endpoint_value", + "read_endpoint_port": 1920, + "read_replicas_mode": 1, + "customer_managed_key": "customer_managed_key_value", + "persistence_config": { + "persistence_mode": 1, + "rdb_snapshot_period": 3, + "rdb_next_snapshot_time": {}, + "rdb_snapshot_start_time": {}, + }, + "suspension_reasons": [1], + "maintenance_version": "maintenance_version_value", + "available_maintenance_versions": [ + "available_maintenance_versions_value1", + "available_maintenance_versions_value2", + ], + } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 @@ -8046,7 +9068,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -8060,7 +9082,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER + for field, value in request_init["instance"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -8075,12 +9097,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -8093,15 +9119,15 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_instance(request) @@ -8114,20 +9140,32 @@ def get_message_fields(field): def test_update_instance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.CloudRedisRestInterceptor(), + ) client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_update_instance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_update_instance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_update_instance") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + 
transports.CloudRedisRestInterceptor, "post_update_instance" + ) as post, + mock.patch.object( + transports.CloudRedisRestInterceptor, "post_update_instance_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.CloudRedisRestInterceptor, "pre_update_instance" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = cloud_redis.UpdateInstanceRequest.pb(cloud_redis.UpdateInstanceRequest()) + pb_message = cloud_redis.UpdateInstanceRequest.pb( + cloud_redis.UpdateInstanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -8142,7 +9180,7 @@ def test_update_instance_rest_interceptors(null_interceptor): req.return_value.content = return_value request = cloud_redis.UpdateInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -8150,27 +9188,37 @@ def test_update_instance_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.update_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.update_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_upgrade_instance_rest_bad_request(request_type=cloud_redis.UpgradeInstanceRequest): +def test_upgrade_instance_rest_bad_request( + request_type=cloud_redis.UpgradeInstanceRequest, +): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request 
call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -8179,30 +9227,32 @@ def test_upgrade_instance_rest_bad_request(request_type=cloud_redis.UpgradeInsta client.upgrade_instance(request) -@pytest.mark.parametrize("request_type", [ - cloud_redis.UpgradeInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.UpgradeInstanceRequest, + dict, + ], +) def test_upgrade_instance_rest_call_success(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.upgrade_instance(request) @@ -8215,20 +9265,32 @@ def test_upgrade_instance_rest_call_success(request_type): def test_upgrade_instance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.CloudRedisRestInterceptor(), + ) client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_upgrade_instance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_upgrade_instance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_upgrade_instance") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.CloudRedisRestInterceptor, "post_upgrade_instance" + ) as post, + mock.patch.object( + transports.CloudRedisRestInterceptor, "post_upgrade_instance_with_metadata" + ) as 
post_with_metadata, + mock.patch.object( + transports.CloudRedisRestInterceptor, "pre_upgrade_instance" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = cloud_redis.UpgradeInstanceRequest.pb(cloud_redis.UpgradeInstanceRequest()) + pb_message = cloud_redis.UpgradeInstanceRequest.pb( + cloud_redis.UpgradeInstanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -8243,7 +9305,7 @@ def test_upgrade_instance_rest_interceptors(null_interceptor): req.return_value.content = return_value request = cloud_redis.UpgradeInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -8251,27 +9313,37 @@ def test_upgrade_instance_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.upgrade_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.upgrade_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_import_instance_rest_bad_request(request_type=cloud_redis.ImportInstanceRequest): +def test_import_instance_rest_bad_request( + request_type=cloud_redis.ImportInstanceRequest, +): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -8280,30 +9352,32 @@ def test_import_instance_rest_bad_request(request_type=cloud_redis.ImportInstanc client.import_instance(request) -@pytest.mark.parametrize("request_type", [ - cloud_redis.ImportInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.ImportInstanceRequest, + dict, + ], +) def test_import_instance_rest_call_success(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.import_instance(request) @@ -8316,20 +9390,32 @@ def test_import_instance_rest_call_success(request_type): def test_import_instance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.CloudRedisRestInterceptor(), + ) client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_import_instance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_import_instance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_import_instance") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.CloudRedisRestInterceptor, "post_import_instance" + ) as post, + mock.patch.object( + transports.CloudRedisRestInterceptor, "post_import_instance_with_metadata" + ) as post_with_metadata, + 
mock.patch.object( + transports.CloudRedisRestInterceptor, "pre_import_instance" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = cloud_redis.ImportInstanceRequest.pb(cloud_redis.ImportInstanceRequest()) + pb_message = cloud_redis.ImportInstanceRequest.pb( + cloud_redis.ImportInstanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -8344,7 +9430,7 @@ def test_import_instance_rest_interceptors(null_interceptor): req.return_value.content = return_value request = cloud_redis.ImportInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -8352,27 +9438,37 @@ def test_import_instance_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.import_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.import_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_export_instance_rest_bad_request(request_type=cloud_redis.ExportInstanceRequest): +def test_export_instance_rest_bad_request( + request_type=cloud_redis.ExportInstanceRequest, +): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -8381,30 +9477,32 @@ def test_export_instance_rest_bad_request(request_type=cloud_redis.ExportInstanc client.export_instance(request) -@pytest.mark.parametrize("request_type", [ - cloud_redis.ExportInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.ExportInstanceRequest, + dict, + ], +) def test_export_instance_rest_call_success(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.export_instance(request) @@ -8417,20 +9515,32 @@ def test_export_instance_rest_call_success(request_type): def test_export_instance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.CloudRedisRestInterceptor(), + ) client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_export_instance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_export_instance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_export_instance") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.CloudRedisRestInterceptor, "post_export_instance" + ) as post, + mock.patch.object( + transports.CloudRedisRestInterceptor, "post_export_instance_with_metadata" + ) as post_with_metadata, + 
mock.patch.object( + transports.CloudRedisRestInterceptor, "pre_export_instance" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = cloud_redis.ExportInstanceRequest.pb(cloud_redis.ExportInstanceRequest()) + pb_message = cloud_redis.ExportInstanceRequest.pb( + cloud_redis.ExportInstanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -8445,7 +9555,7 @@ def test_export_instance_rest_interceptors(null_interceptor): req.return_value.content = return_value request = cloud_redis.ExportInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -8453,27 +9563,37 @@ def test_export_instance_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.export_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.export_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_failover_instance_rest_bad_request(request_type=cloud_redis.FailoverInstanceRequest): +def test_failover_instance_rest_bad_request( + request_type=cloud_redis.FailoverInstanceRequest, +): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -8482,30 +9602,32 @@ def test_failover_instance_rest_bad_request(request_type=cloud_redis.FailoverIns client.failover_instance(request) -@pytest.mark.parametrize("request_type", [ - cloud_redis.FailoverInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.FailoverInstanceRequest, + dict, + ], +) def test_failover_instance_rest_call_success(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.failover_instance(request) @@ -8518,20 +9640,32 @@ def test_failover_instance_rest_call_success(request_type): def test_failover_instance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.CloudRedisRestInterceptor(), + ) client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_failover_instance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_failover_instance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_failover_instance") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.CloudRedisRestInterceptor, "post_failover_instance" + ) as post, + mock.patch.object( + transports.CloudRedisRestInterceptor, "post_failover_instance_with_metadata" + ) as 
post_with_metadata, + mock.patch.object( + transports.CloudRedisRestInterceptor, "pre_failover_instance" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = cloud_redis.FailoverInstanceRequest.pb(cloud_redis.FailoverInstanceRequest()) + pb_message = cloud_redis.FailoverInstanceRequest.pb( + cloud_redis.FailoverInstanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -8546,7 +9680,7 @@ def test_failover_instance_rest_interceptors(null_interceptor): req.return_value.content = return_value request = cloud_redis.FailoverInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -8554,27 +9688,37 @@ def test_failover_instance_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.failover_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.failover_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_delete_instance_rest_bad_request(request_type=cloud_redis.DeleteInstanceRequest): +def test_delete_instance_rest_bad_request( + request_type=cloud_redis.DeleteInstanceRequest, +): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -8583,30 +9727,32 @@ def test_delete_instance_rest_bad_request(request_type=cloud_redis.DeleteInstanc client.delete_instance(request) -@pytest.mark.parametrize("request_type", [ - cloud_redis.DeleteInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.DeleteInstanceRequest, + dict, + ], +) def test_delete_instance_rest_call_success(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_instance(request) @@ -8619,20 +9765,32 @@ def test_delete_instance_rest_call_success(request_type): def test_delete_instance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.CloudRedisRestInterceptor(), + ) client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_delete_instance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_delete_instance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_delete_instance") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.CloudRedisRestInterceptor, "post_delete_instance" + ) as post, + mock.patch.object( + transports.CloudRedisRestInterceptor, "post_delete_instance_with_metadata" + ) as post_with_metadata, + 
mock.patch.object( + transports.CloudRedisRestInterceptor, "pre_delete_instance" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = cloud_redis.DeleteInstanceRequest.pb(cloud_redis.DeleteInstanceRequest()) + pb_message = cloud_redis.DeleteInstanceRequest.pb( + cloud_redis.DeleteInstanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -8647,7 +9805,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): req.return_value.content = return_value request = cloud_redis.DeleteInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -8655,27 +9813,37 @@ def test_delete_instance_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.delete_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.delete_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_reschedule_maintenance_rest_bad_request(request_type=cloud_redis.RescheduleMaintenanceRequest): +def test_reschedule_maintenance_rest_bad_request( + request_type=cloud_redis.RescheduleMaintenanceRequest, +): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -8684,30 +9852,32 @@ def test_reschedule_maintenance_rest_bad_request(request_type=cloud_redis.Resche client.reschedule_maintenance(request) -@pytest.mark.parametrize("request_type", [ - cloud_redis.RescheduleMaintenanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.RescheduleMaintenanceRequest, + dict, + ], +) def test_reschedule_maintenance_rest_call_success(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.reschedule_maintenance(request) @@ -8720,20 +9890,33 @@ def test_reschedule_maintenance_rest_call_success(request_type): def test_reschedule_maintenance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.CloudRedisRestInterceptor(), + ) client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_reschedule_maintenance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_reschedule_maintenance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_reschedule_maintenance") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.CloudRedisRestInterceptor, "post_reschedule_maintenance" + ) as post, + mock.patch.object( + transports.CloudRedisRestInterceptor, + 
"post_reschedule_maintenance_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.CloudRedisRestInterceptor, "pre_reschedule_maintenance" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = cloud_redis.RescheduleMaintenanceRequest.pb(cloud_redis.RescheduleMaintenanceRequest()) + pb_message = cloud_redis.RescheduleMaintenanceRequest.pb( + cloud_redis.RescheduleMaintenanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -8748,7 +9931,7 @@ def test_reschedule_maintenance_rest_interceptors(null_interceptor): req.return_value.content = return_value request = cloud_redis.RescheduleMaintenanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -8756,7 +9939,13 @@ def test_reschedule_maintenance_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.reschedule_maintenance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.reschedule_maintenance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -8769,13 +9958,18 @@ def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationReq transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -8784,20 +9978,23 @@ def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationReq client.get_location(request) -@pytest.mark.parametrize("request_type", [ - locations_pb2.GetLocationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) def test_get_location_rest(request_type): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2'} + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = locations_pb2.Location() @@ -8805,7 +10002,7 @@ def test_get_location_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -8816,19 +10013,24 @@ def test_get_location_rest(request_type): assert isinstance(response, locations_pb2.Location) -def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): +def test_list_locations_rest_bad_request( + request_type=locations_pb2.ListLocationsRequest, +): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) + request = json_format.ParseDict({"name": "projects/sample1"}, request) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -8837,20 +10039,23 @@ def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocation client.list_locations(request) -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) def test_list_locations_rest(request_type): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1'} + request_init = {"name": "projects/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = locations_pb2.ListLocationsResponse() @@ -8858,7 +10063,7 @@ def test_list_locations_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -8869,19 +10074,26 @@ def test_list_locations_rest(request_type): assert isinstance(response, locations_pb2.ListLocationsResponse) -def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): +def test_cancel_operation_rest_bad_request( + request_type=operations_pb2.CancelOperationRequest, +): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -8890,28 +10102,31 @@ def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOpe client.cancel_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) def test_cancel_operation_rest(request_type): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -8922,19 +10137,26 @@ def test_cancel_operation_rest(request_type): assert response is None -def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): +def test_delete_operation_rest_bad_request( + request_type=operations_pb2.DeleteOperationRequest, +): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -8943,28 +10165,31 @@ def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOpe client.delete_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) def test_delete_operation_rest(request_type): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -8975,19 +10200,26 @@ def test_delete_operation_rest(request_type): assert response is None -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -8996,20 +10228,23 @@ def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperation client.get_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) def test_get_operation_rest(request_type): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = operations_pb2.Operation() @@ -9017,7 +10252,7 @@ def test_get_operation_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -9028,19 +10263,26 @@ def test_get_operation_rest(request_type): assert isinstance(response, operations_pb2.Operation) -def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -9049,20 +10291,23 @@ def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperat client.list_operations(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) def test_list_operations_rest(request_type): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2'} + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = operations_pb2.ListOperationsResponse() @@ -9070,7 +10315,7 @@ def test_list_operations_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -9081,19 +10326,26 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) -def test_wait_operation_rest_bad_request(request_type=operations_pb2.WaitOperationRequest): +def test_wait_operation_rest_bad_request( + request_type=operations_pb2.WaitOperationRequest, +): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -9102,20 +10354,23 @@ def test_wait_operation_rest_bad_request(request_type=operations_pb2.WaitOperati client.wait_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.WaitOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.WaitOperationRequest, + dict, + ], +) def test_wait_operation_rest(request_type): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = operations_pb2.Operation() @@ -9123,7 +10378,7 @@ def test_wait_operation_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -9133,10 +10388,10 @@ def test_wait_operation_rest(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, operations_pb2.Operation) + def test_initialize_client_w_rest(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) assert client is not None @@ -9150,9 +10405,7 @@ def test_list_instances_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: client.list_instances(request=None) # Establish that the underlying stub method was called. @@ -9172,9 +10425,7 @@ def test_get_instance_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: client.get_instance(request=None) # Establish that the underlying stub method was called. @@ -9195,8 +10446,8 @@ def test_get_instance_auth_string_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_instance_auth_string), - '__call__') as call: + type(client.transport.get_instance_auth_string), "__call__" + ) as call: client.get_instance_auth_string(request=None) # Establish that the underlying stub method was called. @@ -9216,9 +10467,7 @@ def test_create_instance_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: client.create_instance(request=None) # Establish that the underlying stub method was called. @@ -9238,9 +10487,7 @@ def test_update_instance_empty_call_rest(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: client.update_instance(request=None) # Establish that the underlying stub method was called. @@ -9260,9 +10507,7 @@ def test_upgrade_instance_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.upgrade_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: client.upgrade_instance(request=None) # Establish that the underlying stub method was called. @@ -9282,9 +10527,7 @@ def test_import_instance_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.import_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.import_instance), "__call__") as call: client.import_instance(request=None) # Establish that the underlying stub method was called. @@ -9304,9 +10547,7 @@ def test_export_instance_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.export_instance), "__call__") as call: client.export_instance(request=None) # Establish that the underlying stub method was called. @@ -9327,8 +10568,8 @@ def test_failover_instance_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.failover_instance), - '__call__') as call: + type(client.transport.failover_instance), "__call__" + ) as call: client.failover_instance(request=None) # Establish that the underlying stub method was called. @@ -9348,9 +10589,7 @@ def test_delete_instance_empty_call_rest(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: client.delete_instance(request=None) # Establish that the underlying stub method was called. @@ -9371,8 +10610,8 @@ def test_reschedule_maintenance_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.reschedule_maintenance), - '__call__') as call: + type(client.transport.reschedule_maintenance), "__call__" + ) as call: client.reschedule_maintenance(request=None) # Establish that the underlying stub method was called. @@ -9393,15 +10632,18 @@ def test_cloud_redis_rest_lro_client(): # Ensure that we have an api-core operations client. assert isinstance( transport.operations_client, -operations_v1.AbstractOperationsClient, + operations_v1.AbstractOperationsClient, ) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client + def test_transport_kind_rest_asyncio(): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) transport = CloudRedisAsyncClient.get_transport_class("rest_asyncio")( credentials=async_anonymous_credentials() ) @@ -9409,22 +10651,28 @@ def test_transport_kind_rest_asyncio(): @pytest.mark.asyncio -async def test_list_instances_rest_asyncio_bad_request(request_type=cloud_redis.ListInstancesRequest): +async def test_list_instances_rest_asyncio_bad_request( + request_type=cloud_redis.ListInstancesRequest, +): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." 
+ ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(AsyncAuthorizedSession, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.read = mock.AsyncMock(return_value=b"{}") response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value @@ -9433,28 +10681,32 @@ async def test_list_instances_rest_asyncio_bad_request(request_type=cloud_redis. @pytest.mark.asyncio -@pytest.mark.parametrize("request_type", [ - cloud_redis.ListInstancesRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.ListInstancesRequest, + dict, + ], +) async def test_list_instances_rest_asyncio_call_success(request_type): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." 
+ ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = cloud_redis.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -9464,37 +10716,54 @@ async def test_list_instances_rest_asyncio_call_success(request_type): # Convert return value to protobuf type return_value = cloud_redis.ListInstancesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.list_instances(request) # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListInstancesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio @pytest.mark.parametrize("null_interceptor", [True, False]) async def test_list_instances_rest_asyncio_interceptors(null_interceptor): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) transport = transports.AsyncCloudRedisRestTransport( credentials=async_anonymous_credentials(), - interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AsyncCloudRedisRestInterceptor(), + ) client = CloudRedisAsyncClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_list_instances") as post, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_list_instances_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_list_instances") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "post_list_instances" + ) as post, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, + "post_list_instances_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "pre_list_instances" + ) as pre, + ): pre.assert_not_called() 
post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = cloud_redis.ListInstancesRequest.pb(cloud_redis.ListInstancesRequest()) + pb_message = cloud_redis.ListInstancesRequest.pb( + cloud_redis.ListInstancesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -9505,11 +10774,13 @@ async def test_list_instances_rest_asyncio_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = cloud_redis.ListInstancesResponse.to_json(cloud_redis.ListInstancesResponse()) + return_value = cloud_redis.ListInstancesResponse.to_json( + cloud_redis.ListInstancesResponse() + ) req.return_value.read = mock.AsyncMock(return_value=return_value) request = cloud_redis.ListInstancesRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -9517,29 +10788,42 @@ async def test_list_instances_rest_asyncio_interceptors(null_interceptor): post.return_value = cloud_redis.ListInstancesResponse() post_with_metadata.return_value = cloud_redis.ListInstancesResponse(), metadata - await client.list_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + await client.list_instances( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() + @pytest.mark.asyncio -async def test_get_instance_rest_asyncio_bad_request(request_type=cloud_redis.GetInstanceRequest): +async def test_get_instance_rest_asyncio_bad_request( + request_type=cloud_redis.GetInstanceRequest, +): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." 
+ ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(AsyncAuthorizedSession, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.read = mock.AsyncMock(return_value=b"{}") response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value @@ -9548,53 +10832,59 @@ async def test_get_instance_rest_asyncio_bad_request(request_type=cloud_redis.Ge @pytest.mark.asyncio -@pytest.mark.parametrize("request_type", [ - cloud_redis.GetInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.GetInstanceRequest, + dict, + ], +) async def test_get_instance_rest_asyncio_call_success(request_type): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." 
+ ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = cloud_redis.Instance( - name='name_value', - display_name='display_name_value', - location_id='location_id_value', - alternative_location_id='alternative_location_id_value', - redis_version='redis_version_value', - reserved_ip_range='reserved_ip_range_value', - secondary_ip_range='secondary_ip_range_value', - host='host_value', - port=453, - current_location_id='current_location_id_value', - state=cloud_redis.Instance.State.CREATING, - status_message='status_message_value', - tier=cloud_redis.Instance.Tier.BASIC, - memory_size_gb=1499, - authorized_network='authorized_network_value', - persistence_iam_identity='persistence_iam_identity_value', - connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, - auth_enabled=True, - transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, - replica_count=1384, - read_endpoint='read_endpoint_value', - read_endpoint_port=1920, - read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, - customer_managed_key='customer_managed_key_value', - suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], - maintenance_version='maintenance_version_value', - available_maintenance_versions=['available_maintenance_versions_value'], + 
name="name_value", + display_name="display_name_value", + location_id="location_id_value", + alternative_location_id="alternative_location_id_value", + redis_version="redis_version_value", + reserved_ip_range="reserved_ip_range_value", + secondary_ip_range="secondary_ip_range_value", + host="host_value", + port=453, + current_location_id="current_location_id_value", + state=cloud_redis.Instance.State.CREATING, + status_message="status_message_value", + tier=cloud_redis.Instance.Tier.BASIC, + memory_size_gb=1499, + authorized_network="authorized_network_value", + persistence_iam_identity="persistence_iam_identity_value", + connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, + auth_enabled=True, + transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, + replica_count=1384, + read_endpoint="read_endpoint_value", + read_endpoint_port=1920, + read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, + customer_managed_key="customer_managed_key_value", + suspension_reasons=[ + cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE + ], + maintenance_version="maintenance_version_value", + available_maintenance_versions=["available_maintenance_versions_value"], ) # Wrap the value into a proper Response obj @@ -9604,58 +10894,82 @@ async def test_get_instance_rest_asyncio_call_success(request_type): # Convert return value to protobuf type return_value = cloud_redis.Instance.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.get_instance(request) # Establish that the response is the type that we expect. 
assert isinstance(response, cloud_redis.Instance) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.location_id == 'location_id_value' - assert response.alternative_location_id == 'alternative_location_id_value' - assert response.redis_version == 'redis_version_value' - assert response.reserved_ip_range == 'reserved_ip_range_value' - assert response.secondary_ip_range == 'secondary_ip_range_value' - assert response.host == 'host_value' + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.location_id == "location_id_value" + assert response.alternative_location_id == "alternative_location_id_value" + assert response.redis_version == "redis_version_value" + assert response.reserved_ip_range == "reserved_ip_range_value" + assert response.secondary_ip_range == "secondary_ip_range_value" + assert response.host == "host_value" assert response.port == 453 - assert response.current_location_id == 'current_location_id_value' + assert response.current_location_id == "current_location_id_value" assert response.state == cloud_redis.Instance.State.CREATING - assert response.status_message == 'status_message_value' + assert response.status_message == "status_message_value" assert response.tier == cloud_redis.Instance.Tier.BASIC assert response.memory_size_gb == 1499 - assert response.authorized_network == 'authorized_network_value' - assert response.persistence_iam_identity == 'persistence_iam_identity_value' + assert response.authorized_network == "authorized_network_value" + assert response.persistence_iam_identity == "persistence_iam_identity_value" assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING assert response.auth_enabled is True - assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + assert ( + response.transit_encryption_mode + == 
cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + ) assert response.replica_count == 1384 - assert response.read_endpoint == 'read_endpoint_value' + assert response.read_endpoint == "read_endpoint_value" assert response.read_endpoint_port == 1920 - assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED - assert response.customer_managed_key == 'customer_managed_key_value' - assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE] - assert response.maintenance_version == 'maintenance_version_value' - assert response.available_maintenance_versions == ['available_maintenance_versions_value'] + assert ( + response.read_replicas_mode + == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED + ) + assert response.customer_managed_key == "customer_managed_key_value" + assert response.suspension_reasons == [ + cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE + ] + assert response.maintenance_version == "maintenance_version_value" + assert response.available_maintenance_versions == [ + "available_maintenance_versions_value" + ] @pytest.mark.asyncio @pytest.mark.parametrize("null_interceptor", [True, False]) async def test_get_instance_rest_asyncio_interceptors(null_interceptor): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." 
+ ) transport = transports.AsyncCloudRedisRestTransport( credentials=async_anonymous_credentials(), - interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AsyncCloudRedisRestInterceptor(), + ) client = CloudRedisAsyncClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_get_instance") as post, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_get_instance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_get_instance") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "post_get_instance" + ) as post, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "post_get_instance_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "pre_get_instance" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -9674,7 +10988,7 @@ async def test_get_instance_rest_asyncio_interceptors(null_interceptor): req.return_value.read = mock.AsyncMock(return_value=return_value) request = cloud_redis.GetInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -9682,29 +10996,42 @@ async def test_get_instance_rest_asyncio_interceptors(null_interceptor): post.return_value = cloud_redis.Instance() post_with_metadata.return_value = cloud_redis.Instance(), metadata - await client.get_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + await client.get_instance( + request, + metadata=[ + ("key", "val"), 
+ ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() + @pytest.mark.asyncio -async def test_get_instance_auth_string_rest_asyncio_bad_request(request_type=cloud_redis.GetInstanceAuthStringRequest): +async def test_get_instance_auth_string_rest_asyncio_bad_request( + request_type=cloud_redis.GetInstanceAuthStringRequest, +): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(AsyncAuthorizedSession, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.read = mock.AsyncMock(return_value=b"{}") response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value @@ -9713,27 +11040,31 @@ async def test_get_instance_auth_string_rest_asyncio_bad_request(request_type=cl @pytest.mark.asyncio -@pytest.mark.parametrize("request_type", [ - cloud_redis.GetInstanceAuthStringRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.GetInstanceAuthStringRequest, + dict, + ], +) async def test_get_instance_auth_string_rest_asyncio_call_success(request_type): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = cloud_redis.InstanceAuthString( - auth_string='auth_string_value', + auth_string="auth_string_value", ) # Wrap the value into a proper Response obj @@ -9743,36 +11074,53 @@ async def test_get_instance_auth_string_rest_asyncio_call_success(request_type): # Convert return value to protobuf type return_value = cloud_redis.InstanceAuthString.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.get_instance_auth_string(request) # Establish that the response is the type that we expect. assert isinstance(response, cloud_redis.InstanceAuthString) - assert response.auth_string == 'auth_string_value' + assert response.auth_string == "auth_string_value" @pytest.mark.asyncio @pytest.mark.parametrize("null_interceptor", [True, False]) async def test_get_instance_auth_string_rest_asyncio_interceptors(null_interceptor): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." 
+ ) transport = transports.AsyncCloudRedisRestTransport( credentials=async_anonymous_credentials(), - interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AsyncCloudRedisRestInterceptor(), + ) client = CloudRedisAsyncClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_get_instance_auth_string") as post, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_get_instance_auth_string_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_get_instance_auth_string") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "post_get_instance_auth_string" + ) as post, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, + "post_get_instance_auth_string_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "pre_get_instance_auth_string" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = cloud_redis.GetInstanceAuthStringRequest.pb(cloud_redis.GetInstanceAuthStringRequest()) + pb_message = cloud_redis.GetInstanceAuthStringRequest.pb( + cloud_redis.GetInstanceAuthStringRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -9783,11 +11131,13 @@ async def test_get_instance_auth_string_rest_asyncio_interceptors(null_intercept req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = 
cloud_redis.InstanceAuthString.to_json(cloud_redis.InstanceAuthString()) + return_value = cloud_redis.InstanceAuthString.to_json( + cloud_redis.InstanceAuthString() + ) req.return_value.read = mock.AsyncMock(return_value=return_value) request = cloud_redis.GetInstanceAuthStringRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -9795,29 +11145,42 @@ async def test_get_instance_auth_string_rest_asyncio_interceptors(null_intercept post.return_value = cloud_redis.InstanceAuthString() post_with_metadata.return_value = cloud_redis.InstanceAuthString(), metadata - await client.get_instance_auth_string(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + await client.get_instance_auth_string( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() + @pytest.mark.asyncio -async def test_create_instance_rest_asyncio_bad_request(request_type=cloud_redis.CreateInstanceRequest): +async def test_create_instance_rest_asyncio_bad_request( + request_type=cloud_redis.CreateInstanceRequest, +): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(AsyncAuthorizedSession, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.read = mock.AsyncMock(return_value=b"{}") response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value @@ -9826,21 +11189,98 @@ async def test_create_instance_rest_asyncio_bad_request(request_type=cloud_redis @pytest.mark.asyncio -@pytest.mark.parametrize("request_type", [ - cloud_redis.CreateInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.CreateInstanceRequest, + dict, + ], +) async def test_create_instance_rest_asyncio_call_success(request_type): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." 
+ ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["instance"] = {'name': 'name_value', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 
'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["instance"] = { + "name": "name_value", + "display_name": "display_name_value", + "labels": {}, + "location_id": "location_id_value", + "alternative_location_id": "alternative_location_id_value", + "redis_version": "redis_version_value", + "reserved_ip_range": "reserved_ip_range_value", + "secondary_ip_range": "secondary_ip_range_value", + "host": "host_value", + "port": 453, + "current_location_id": "current_location_id_value", + "create_time": {"seconds": 751, "nanos": 543}, + "state": 1, + "status_message": "status_message_value", + "redis_configs": {}, + "tier": 1, + "memory_size_gb": 1499, + "authorized_network": "authorized_network_value", + "persistence_iam_identity": "persistence_iam_identity_value", + "connect_mode": 1, + "auth_enabled": True, + "server_ca_certs": [ + { + "serial_number": "serial_number_value", + "cert": "cert_value", + "create_time": {}, + "expire_time": {}, + "sha1_fingerprint": "sha1_fingerprint_value", + } + ], + "transit_encryption_mode": 1, + "maintenance_policy": { + "create_time": {}, + "update_time": {}, + "description": "description_value", + "weekly_maintenance_window": [ + { + "day": 1, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "duration": {"seconds": 751, "nanos": 543}, + } + ], + }, + "maintenance_schedule": { + "start_time": {}, + "end_time": {}, + "can_reschedule": True, + "schedule_deadline_time": {}, + }, + "replica_count": 1384, + "nodes": [{"id": "id_value", "zone": "zone_value"}], + "read_endpoint": "read_endpoint_value", + "read_endpoint_port": 1920, + "read_replicas_mode": 1, + "customer_managed_key": "customer_managed_key_value", + "persistence_config": { + "persistence_mode": 1, + "rdb_snapshot_period": 3, + 
"rdb_next_snapshot_time": {}, + "rdb_snapshot_start_time": {}, + }, + "suspension_reasons": [1], + "maintenance_version": "maintenance_version_value", + "available_maintenance_versions": [ + "available_maintenance_versions_value1", + "available_maintenance_versions_value2", + ], + } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 @@ -9860,7 +11300,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -9874,7 +11314,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER + for field, value in request_init["instance"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -9889,12 +11329,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # 
pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -9907,15 +11351,17 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.create_instance(request) @@ -9928,23 +11374,38 @@ def get_message_fields(field): @pytest.mark.parametrize("null_interceptor", [True, False]) async def test_create_instance_rest_asyncio_interceptors(null_interceptor): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." 
+ ) transport = transports.AsyncCloudRedisRestTransport( credentials=async_anonymous_credentials(), - interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AsyncCloudRedisRestInterceptor(), + ) client = CloudRedisAsyncClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_create_instance") as post, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_create_instance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_create_instance") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "post_create_instance" + ) as post, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, + "post_create_instance_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "pre_create_instance" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = cloud_redis.CreateInstanceRequest.pb(cloud_redis.CreateInstanceRequest()) + pb_message = cloud_redis.CreateInstanceRequest.pb( + cloud_redis.CreateInstanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -9959,7 +11420,7 @@ async def test_create_instance_rest_asyncio_interceptors(null_interceptor): req.return_value.read = mock.AsyncMock(return_value=return_value) request = cloud_redis.CreateInstanceRequest() - metadata =[ + 
metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -9967,29 +11428,44 @@ async def test_create_instance_rest_asyncio_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - await client.create_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + await client.create_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() + @pytest.mark.asyncio -async def test_update_instance_rest_asyncio_bad_request(request_type=cloud_redis.UpdateInstanceRequest): +async def test_update_instance_rest_asyncio_bad_request( + request_type=cloud_redis.UpdateInstanceRequest, +): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + request_init = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(AsyncAuthorizedSession, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.read = mock.AsyncMock(return_value=b"{}") response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value @@ -9998,21 +11474,100 @@ async def test_update_instance_rest_asyncio_bad_request(request_type=cloud_redis @pytest.mark.asyncio -@pytest.mark.parametrize("request_type", [ - cloud_redis.UpdateInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.UpdateInstanceRequest, + dict, + ], +) async def test_update_instance_rest_asyncio_call_success(request_type): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." 
+ ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} - request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 
'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} + request_init = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } + request_init["instance"] = { + "name": "projects/sample1/locations/sample2/instances/sample3", + "display_name": "display_name_value", + "labels": {}, + "location_id": "location_id_value", + "alternative_location_id": "alternative_location_id_value", + "redis_version": "redis_version_value", + "reserved_ip_range": "reserved_ip_range_value", + "secondary_ip_range": "secondary_ip_range_value", + "host": "host_value", + "port": 453, + "current_location_id": "current_location_id_value", + "create_time": {"seconds": 751, "nanos": 543}, + "state": 1, + "status_message": "status_message_value", + "redis_configs": {}, + "tier": 1, + "memory_size_gb": 1499, + "authorized_network": "authorized_network_value", + "persistence_iam_identity": "persistence_iam_identity_value", + "connect_mode": 1, + "auth_enabled": True, + "server_ca_certs": [ + { + "serial_number": "serial_number_value", + "cert": "cert_value", + "create_time": {}, + "expire_time": {}, + "sha1_fingerprint": "sha1_fingerprint_value", + } + ], + "transit_encryption_mode": 1, + "maintenance_policy": { + "create_time": {}, + "update_time": {}, + "description": "description_value", + "weekly_maintenance_window": [ + { + "day": 1, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "duration": {"seconds": 751, "nanos": 543}, + } + ], + }, + "maintenance_schedule": { + "start_time": {}, + "end_time": {}, + "can_reschedule": True, + "schedule_deadline_time": {}, + }, + "replica_count": 1384, + "nodes": [{"id": "id_value", "zone": "zone_value"}], + "read_endpoint": "read_endpoint_value", + "read_endpoint_port": 1920, + "read_replicas_mode": 1, + 
"customer_managed_key": "customer_managed_key_value", + "persistence_config": { + "persistence_mode": 1, + "rdb_snapshot_period": 3, + "rdb_next_snapshot_time": {}, + "rdb_snapshot_start_time": {}, + }, + "suspension_reasons": [1], + "maintenance_version": "maintenance_version_value", + "available_maintenance_versions": [ + "available_maintenance_versions_value1", + "available_maintenance_versions_value2", + ], + } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 @@ -10032,7 +11587,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -10046,7 +11601,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER + for field, value in request_init["instance"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -10061,12 +11616,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present 
at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -10079,15 +11638,17 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.update_instance(request) @@ -10100,23 +11661,38 @@ def get_message_fields(field): @pytest.mark.parametrize("null_interceptor", [True, False]) async def test_update_instance_rest_asyncio_interceptors(null_interceptor): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." 
+ ) transport = transports.AsyncCloudRedisRestTransport( credentials=async_anonymous_credentials(), - interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AsyncCloudRedisRestInterceptor(), + ) client = CloudRedisAsyncClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_update_instance") as post, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_update_instance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_update_instance") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "post_update_instance" + ) as post, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, + "post_update_instance_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "pre_update_instance" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = cloud_redis.UpdateInstanceRequest.pb(cloud_redis.UpdateInstanceRequest()) + pb_message = cloud_redis.UpdateInstanceRequest.pb( + cloud_redis.UpdateInstanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -10131,7 +11707,7 @@ async def test_update_instance_rest_asyncio_interceptors(null_interceptor): req.return_value.read = mock.AsyncMock(return_value=return_value) request = cloud_redis.UpdateInstanceRequest() - metadata =[ + 
metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -10139,29 +11715,42 @@ async def test_update_instance_rest_asyncio_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - await client.update_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + await client.update_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() + @pytest.mark.asyncio -async def test_upgrade_instance_rest_asyncio_bad_request(request_type=cloud_redis.UpgradeInstanceRequest): +async def test_upgrade_instance_rest_asyncio_bad_request( + request_type=cloud_redis.UpgradeInstanceRequest, +): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(AsyncAuthorizedSession, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.read = mock.AsyncMock(return_value=b"{}") response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value @@ -10170,32 +11759,38 @@ async def test_upgrade_instance_rest_asyncio_bad_request(request_type=cloud_redi @pytest.mark.asyncio -@pytest.mark.parametrize("request_type", [ - cloud_redis.UpgradeInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.UpgradeInstanceRequest, + dict, + ], +) async def test_upgrade_instance_rest_asyncio_call_success(request_type): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.upgrade_instance(request) @@ -10208,23 +11803,38 @@ async def test_upgrade_instance_rest_asyncio_call_success(request_type): @pytest.mark.parametrize("null_interceptor", [True, False]) async def test_upgrade_instance_rest_asyncio_interceptors(null_interceptor): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." 
+ ) transport = transports.AsyncCloudRedisRestTransport( credentials=async_anonymous_credentials(), - interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AsyncCloudRedisRestInterceptor(), + ) client = CloudRedisAsyncClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_upgrade_instance") as post, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_upgrade_instance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_upgrade_instance") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "post_upgrade_instance" + ) as post, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, + "post_upgrade_instance_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "pre_upgrade_instance" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = cloud_redis.UpgradeInstanceRequest.pb(cloud_redis.UpgradeInstanceRequest()) + pb_message = cloud_redis.UpgradeInstanceRequest.pb( + cloud_redis.UpgradeInstanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -10239,7 +11849,7 @@ async def test_upgrade_instance_rest_asyncio_interceptors(null_interceptor): req.return_value.read = mock.AsyncMock(return_value=return_value) request = cloud_redis.UpgradeInstanceRequest() - 
metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -10247,29 +11857,42 @@ async def test_upgrade_instance_rest_asyncio_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - await client.upgrade_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + await client.upgrade_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() + @pytest.mark.asyncio -async def test_import_instance_rest_asyncio_bad_request(request_type=cloud_redis.ImportInstanceRequest): +async def test_import_instance_rest_asyncio_bad_request( + request_type=cloud_redis.ImportInstanceRequest, +): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(AsyncAuthorizedSession, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.read = mock.AsyncMock(return_value=b"{}") response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value @@ -10278,32 +11901,38 @@ async def test_import_instance_rest_asyncio_bad_request(request_type=cloud_redis @pytest.mark.asyncio -@pytest.mark.parametrize("request_type", [ - cloud_redis.ImportInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.ImportInstanceRequest, + dict, + ], +) async def test_import_instance_rest_asyncio_call_success(request_type): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.import_instance(request) @@ -10316,23 +11945,38 @@ async def test_import_instance_rest_asyncio_call_success(request_type): @pytest.mark.parametrize("null_interceptor", [True, False]) async def test_import_instance_rest_asyncio_interceptors(null_interceptor): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." 
+ ) transport = transports.AsyncCloudRedisRestTransport( credentials=async_anonymous_credentials(), - interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AsyncCloudRedisRestInterceptor(), + ) client = CloudRedisAsyncClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_import_instance") as post, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_import_instance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_import_instance") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "post_import_instance" + ) as post, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, + "post_import_instance_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "pre_import_instance" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = cloud_redis.ImportInstanceRequest.pb(cloud_redis.ImportInstanceRequest()) + pb_message = cloud_redis.ImportInstanceRequest.pb( + cloud_redis.ImportInstanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -10347,7 +11991,7 @@ async def test_import_instance_rest_asyncio_interceptors(null_interceptor): req.return_value.read = mock.AsyncMock(return_value=return_value) request = cloud_redis.ImportInstanceRequest() - metadata =[ + 
metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -10355,29 +11999,42 @@ async def test_import_instance_rest_asyncio_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - await client.import_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + await client.import_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() + @pytest.mark.asyncio -async def test_export_instance_rest_asyncio_bad_request(request_type=cloud_redis.ExportInstanceRequest): +async def test_export_instance_rest_asyncio_bad_request( + request_type=cloud_redis.ExportInstanceRequest, +): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(AsyncAuthorizedSession, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.read = mock.AsyncMock(return_value=b"{}") response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value @@ -10386,32 +12043,38 @@ async def test_export_instance_rest_asyncio_bad_request(request_type=cloud_redis @pytest.mark.asyncio -@pytest.mark.parametrize("request_type", [ - cloud_redis.ExportInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.ExportInstanceRequest, + dict, + ], +) async def test_export_instance_rest_asyncio_call_success(request_type): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.export_instance(request) @@ -10424,23 +12087,38 @@ async def test_export_instance_rest_asyncio_call_success(request_type): @pytest.mark.parametrize("null_interceptor", [True, False]) async def test_export_instance_rest_asyncio_interceptors(null_interceptor): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." 
+ ) transport = transports.AsyncCloudRedisRestTransport( credentials=async_anonymous_credentials(), - interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AsyncCloudRedisRestInterceptor(), + ) client = CloudRedisAsyncClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_export_instance") as post, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_export_instance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_export_instance") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "post_export_instance" + ) as post, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, + "post_export_instance_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "pre_export_instance" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = cloud_redis.ExportInstanceRequest.pb(cloud_redis.ExportInstanceRequest()) + pb_message = cloud_redis.ExportInstanceRequest.pb( + cloud_redis.ExportInstanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -10455,7 +12133,7 @@ async def test_export_instance_rest_asyncio_interceptors(null_interceptor): req.return_value.read = mock.AsyncMock(return_value=return_value) request = cloud_redis.ExportInstanceRequest() - metadata =[ + 
metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -10463,29 +12141,42 @@ async def test_export_instance_rest_asyncio_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - await client.export_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + await client.export_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() + @pytest.mark.asyncio -async def test_failover_instance_rest_asyncio_bad_request(request_type=cloud_redis.FailoverInstanceRequest): +async def test_failover_instance_rest_asyncio_bad_request( + request_type=cloud_redis.FailoverInstanceRequest, +): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(AsyncAuthorizedSession, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.read = mock.AsyncMock(return_value=b"{}") response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value @@ -10494,32 +12185,38 @@ async def test_failover_instance_rest_asyncio_bad_request(request_type=cloud_red @pytest.mark.asyncio -@pytest.mark.parametrize("request_type", [ - cloud_redis.FailoverInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.FailoverInstanceRequest, + dict, + ], +) async def test_failover_instance_rest_asyncio_call_success(request_type): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.failover_instance(request) @@ -10532,23 +12229,38 @@ async def test_failover_instance_rest_asyncio_call_success(request_type): @pytest.mark.parametrize("null_interceptor", [True, False]) async def test_failover_instance_rest_asyncio_interceptors(null_interceptor): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." 
+ ) transport = transports.AsyncCloudRedisRestTransport( credentials=async_anonymous_credentials(), - interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AsyncCloudRedisRestInterceptor(), + ) client = CloudRedisAsyncClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_failover_instance") as post, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_failover_instance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_failover_instance") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "post_failover_instance" + ) as post, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, + "post_failover_instance_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "pre_failover_instance" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = cloud_redis.FailoverInstanceRequest.pb(cloud_redis.FailoverInstanceRequest()) + pb_message = cloud_redis.FailoverInstanceRequest.pb( + cloud_redis.FailoverInstanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -10563,7 +12275,7 @@ async def test_failover_instance_rest_asyncio_interceptors(null_interceptor): req.return_value.read = mock.AsyncMock(return_value=return_value) request = 
cloud_redis.FailoverInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -10571,29 +12283,42 @@ async def test_failover_instance_rest_asyncio_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - await client.failover_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + await client.failover_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() + @pytest.mark.asyncio -async def test_delete_instance_rest_asyncio_bad_request(request_type=cloud_redis.DeleteInstanceRequest): +async def test_delete_instance_rest_asyncio_bad_request( + request_type=cloud_redis.DeleteInstanceRequest, +): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(AsyncAuthorizedSession, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.read = mock.AsyncMock(return_value=b"{}") response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value @@ -10602,32 +12327,38 @@ async def test_delete_instance_rest_asyncio_bad_request(request_type=cloud_redis @pytest.mark.asyncio -@pytest.mark.parametrize("request_type", [ - cloud_redis.DeleteInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.DeleteInstanceRequest, + dict, + ], +) async def test_delete_instance_rest_asyncio_call_success(request_type): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.delete_instance(request) @@ -10640,23 +12371,38 @@ async def test_delete_instance_rest_asyncio_call_success(request_type): @pytest.mark.parametrize("null_interceptor", [True, False]) async def test_delete_instance_rest_asyncio_interceptors(null_interceptor): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." 
+ ) transport = transports.AsyncCloudRedisRestTransport( credentials=async_anonymous_credentials(), - interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AsyncCloudRedisRestInterceptor(), + ) client = CloudRedisAsyncClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_delete_instance") as post, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_delete_instance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_delete_instance") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "post_delete_instance" + ) as post, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, + "post_delete_instance_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "pre_delete_instance" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = cloud_redis.DeleteInstanceRequest.pb(cloud_redis.DeleteInstanceRequest()) + pb_message = cloud_redis.DeleteInstanceRequest.pb( + cloud_redis.DeleteInstanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -10671,7 +12417,7 @@ async def test_delete_instance_rest_asyncio_interceptors(null_interceptor): req.return_value.read = mock.AsyncMock(return_value=return_value) request = cloud_redis.DeleteInstanceRequest() - metadata =[ + 
metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -10679,29 +12425,42 @@ async def test_delete_instance_rest_asyncio_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - await client.delete_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + await client.delete_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() + @pytest.mark.asyncio -async def test_reschedule_maintenance_rest_asyncio_bad_request(request_type=cloud_redis.RescheduleMaintenanceRequest): +async def test_reschedule_maintenance_rest_asyncio_bad_request( + request_type=cloud_redis.RescheduleMaintenanceRequest, +): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(AsyncAuthorizedSession, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.read = mock.AsyncMock(return_value=b"{}") response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value @@ -10710,32 +12469,38 @@ async def test_reschedule_maintenance_rest_asyncio_bad_request(request_type=clou @pytest.mark.asyncio -@pytest.mark.parametrize("request_type", [ - cloud_redis.RescheduleMaintenanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.RescheduleMaintenanceRequest, + dict, + ], +) async def test_reschedule_maintenance_rest_asyncio_call_success(request_type): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.reschedule_maintenance(request) @@ -10748,23 +12513,38 @@ async def test_reschedule_maintenance_rest_asyncio_call_success(request_type): @pytest.mark.parametrize("null_interceptor", [True, False]) async def test_reschedule_maintenance_rest_asyncio_interceptors(null_interceptor): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." 
+ ) transport = transports.AsyncCloudRedisRestTransport( credentials=async_anonymous_credentials(), - interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AsyncCloudRedisRestInterceptor(), + ) client = CloudRedisAsyncClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_reschedule_maintenance") as post, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_reschedule_maintenance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_reschedule_maintenance") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "post_reschedule_maintenance" + ) as post, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, + "post_reschedule_maintenance_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "pre_reschedule_maintenance" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = cloud_redis.RescheduleMaintenanceRequest.pb(cloud_redis.RescheduleMaintenanceRequest()) + pb_message = cloud_redis.RescheduleMaintenanceRequest.pb( + cloud_redis.RescheduleMaintenanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -10779,7 +12559,7 @@ async def test_reschedule_maintenance_rest_asyncio_interceptors(null_interceptor req.return_value.read = 
mock.AsyncMock(return_value=return_value) request = cloud_redis.RescheduleMaintenanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -10787,51 +12567,73 @@ async def test_reschedule_maintenance_rest_asyncio_interceptors(null_interceptor post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - await client.reschedule_maintenance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + await client.reschedule_maintenance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() + @pytest.mark.asyncio -async def test_get_location_rest_asyncio_bad_request(request_type=locations_pb2.GetLocationRequest): +async def test_get_location_rest_asyncio_bad_request( + request_type=locations_pb2.GetLocationRequest, +): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(AsyncAuthorizedSession, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.read = mock.AsyncMock(return_value=b"{}") response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.get_location(request) + @pytest.mark.asyncio -@pytest.mark.parametrize("request_type", [ - locations_pb2.GetLocationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) async def test_get_location_rest_asyncio(request_type): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) - request_init = {'name': 'projects/sample1/locations/sample2'} + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + with mock.patch.object(AsyncAuthorizedSession, "request") as req: # Designate an appropriate value for the returned response. 
return_value = locations_pb2.Location() @@ -10839,7 +12641,9 @@ async def test_get_location_rest_asyncio(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -10849,45 +12653,59 @@ async def test_get_location_rest_asyncio(request_type): # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.Location) + @pytest.mark.asyncio -async def test_list_locations_rest_asyncio_bad_request(request_type=locations_pb2.ListLocationsRequest): +async def test_list_locations_rest_asyncio_bad_request( + request_type=locations_pb2.ListLocationsRequest, +): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) + request = json_format.ParseDict({"name": "projects/sample1"}, request) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(AsyncAuthorizedSession, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.read = mock.AsyncMock(return_value=b"{}") response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.list_locations(request) + @pytest.mark.asyncio -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) async def test_list_locations_rest_asyncio(request_type): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) - request_init = {'name': 'projects/sample1'} + request_init = {"name": "projects/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + with mock.patch.object(AsyncAuthorizedSession, "request") as req: # Designate an appropriate value for the returned response. 
return_value = locations_pb2.ListLocationsResponse() @@ -10895,7 +12713,9 @@ async def test_list_locations_rest_asyncio(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -10905,53 +12725,71 @@ async def test_list_locations_rest_asyncio(request_type): # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.ListLocationsResponse) + @pytest.mark.asyncio -async def test_cancel_operation_rest_asyncio_bad_request(request_type=operations_pb2.CancelOperationRequest): +async def test_cancel_operation_rest_asyncio_bad_request( + request_type=operations_pb2.CancelOperationRequest, +): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(AsyncAuthorizedSession, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.read = mock.AsyncMock(return_value=b"{}") response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.cancel_operation(request) + @pytest.mark.asyncio -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) async def test_cancel_operation_rest_asyncio(request_type): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + with mock.patch.object(AsyncAuthorizedSession, "request") as req: # Designate an appropriate value for the returned response. 
return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = '{}' - response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + json_return_value = "{}" + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -10961,53 +12799,71 @@ async def test_cancel_operation_rest_asyncio(request_type): # Establish that the response is the type that we expect. assert response is None + @pytest.mark.asyncio -async def test_delete_operation_rest_asyncio_bad_request(request_type=operations_pb2.DeleteOperationRequest): +async def test_delete_operation_rest_asyncio_bad_request( + request_type=operations_pb2.DeleteOperationRequest, +): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(AsyncAuthorizedSession, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.read = mock.AsyncMock(return_value=b"{}") response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.delete_operation(request) + @pytest.mark.asyncio -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) async def test_delete_operation_rest_asyncio(request_type): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + with mock.patch.object(AsyncAuthorizedSession, "request") as req: # Designate an appropriate value for the returned response. 
return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = '{}' - response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + json_return_value = "{}" + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -11017,45 +12873,61 @@ async def test_delete_operation_rest_asyncio(request_type): # Establish that the response is the type that we expect. assert response is None + @pytest.mark.asyncio -async def test_get_operation_rest_asyncio_bad_request(request_type=operations_pb2.GetOperationRequest): +async def test_get_operation_rest_asyncio_bad_request( + request_type=operations_pb2.GetOperationRequest, +): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(AsyncAuthorizedSession, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.read = mock.AsyncMock(return_value=b"{}") response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.get_operation(request) + @pytest.mark.asyncio -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) async def test_get_operation_rest_asyncio(request_type): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + with mock.patch.object(AsyncAuthorizedSession, "request") as req: # Designate an appropriate value for the returned response. 
return_value = operations_pb2.Operation() @@ -11063,7 +12935,9 @@ async def test_get_operation_rest_asyncio(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -11073,45 +12947,61 @@ async def test_get_operation_rest_asyncio(request_type): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.Operation) + @pytest.mark.asyncio -async def test_list_operations_rest_asyncio_bad_request(request_type=operations_pb2.ListOperationsRequest): +async def test_list_operations_rest_asyncio_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(AsyncAuthorizedSession, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.read = mock.AsyncMock(return_value=b"{}") response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.list_operations(request) + @pytest.mark.asyncio -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) async def test_list_operations_rest_asyncio(request_type): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) - request_init = {'name': 'projects/sample1/locations/sample2'} + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + with mock.patch.object(AsyncAuthorizedSession, "request") as req: # Designate an appropriate value for the returned response. 
return_value = operations_pb2.ListOperationsResponse() @@ -11119,7 +13009,9 @@ async def test_list_operations_rest_asyncio(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -11129,45 +13021,61 @@ async def test_list_operations_rest_asyncio(request_type): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.ListOperationsResponse) + @pytest.mark.asyncio -async def test_wait_operation_rest_asyncio_bad_request(request_type=operations_pb2.WaitOperationRequest): +async def test_wait_operation_rest_asyncio_bad_request( + request_type=operations_pb2.WaitOperationRequest, +): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(AsyncAuthorizedSession, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.read = mock.AsyncMock(return_value=b"{}") response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.wait_operation(request) + @pytest.mark.asyncio -@pytest.mark.parametrize("request_type", [ - operations_pb2.WaitOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.WaitOperationRequest, + dict, + ], +) async def test_wait_operation_rest_asyncio(request_type): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + with mock.patch.object(AsyncAuthorizedSession, "request") as req: # Designate an appropriate value for the returned response. 
return_value = operations_pb2.Operation() @@ -11175,7 +13083,9 @@ async def test_wait_operation_rest_asyncio(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -11185,12 +13095,14 @@ async def test_wait_operation_rest_asyncio(request_type): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.Operation) + def test_initialize_client_w_rest_asyncio(): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) assert client is not None @@ -11200,16 +13112,16 @@ def test_initialize_client_w_rest_asyncio(): @pytest.mark.asyncio async def test_list_instances_empty_call_rest_asyncio(): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: await client.list_instances(request=None) # Establish that the underlying stub method was called. @@ -11225,16 +13137,16 @@ async def test_list_instances_empty_call_rest_asyncio(): @pytest.mark.asyncio async def test_get_instance_empty_call_rest_asyncio(): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: await client.get_instance(request=None) # Establish that the underlying stub method was called. @@ -11250,7 +13162,9 @@ async def test_get_instance_empty_call_rest_asyncio(): @pytest.mark.asyncio async def test_get_instance_auth_string_empty_call_rest_asyncio(): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", @@ -11258,8 +13172,8 @@ async def test_get_instance_auth_string_empty_call_rest_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_instance_auth_string), - '__call__') as call: + type(client.transport.get_instance_auth_string), "__call__" + ) as call: await client.get_instance_auth_string(request=None) # Establish that the underlying stub method was called. 
@@ -11275,16 +13189,16 @@ async def test_get_instance_auth_string_empty_call_rest_asyncio(): @pytest.mark.asyncio async def test_create_instance_empty_call_rest_asyncio(): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: await client.create_instance(request=None) # Establish that the underlying stub method was called. @@ -11300,16 +13214,16 @@ async def test_create_instance_empty_call_rest_asyncio(): @pytest.mark.asyncio async def test_update_instance_empty_call_rest_asyncio(): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: await client.update_instance(request=None) # Establish that the underlying stub method was called. 
@@ -11325,16 +13239,16 @@ async def test_update_instance_empty_call_rest_asyncio(): @pytest.mark.asyncio async def test_upgrade_instance_empty_call_rest_asyncio(): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.upgrade_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call: await client.upgrade_instance(request=None) # Establish that the underlying stub method was called. @@ -11350,16 +13264,16 @@ async def test_upgrade_instance_empty_call_rest_asyncio(): @pytest.mark.asyncio async def test_import_instance_empty_call_rest_asyncio(): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.import_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.import_instance), "__call__") as call: await client.import_instance(request=None) # Establish that the underlying stub method was called. 
@@ -11375,16 +13289,16 @@ async def test_import_instance_empty_call_rest_asyncio(): @pytest.mark.asyncio async def test_export_instance_empty_call_rest_asyncio(): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.export_instance), "__call__") as call: await client.export_instance(request=None) # Establish that the underlying stub method was called. @@ -11400,7 +13314,9 @@ async def test_export_instance_empty_call_rest_asyncio(): @pytest.mark.asyncio async def test_failover_instance_empty_call_rest_asyncio(): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", @@ -11408,8 +13324,8 @@ async def test_failover_instance_empty_call_rest_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.failover_instance), - '__call__') as call: + type(client.transport.failover_instance), "__call__" + ) as call: await client.failover_instance(request=None) # Establish that the underlying stub method was called. 
@@ -11425,16 +13341,16 @@ async def test_failover_instance_empty_call_rest_asyncio(): @pytest.mark.asyncio async def test_delete_instance_empty_call_rest_asyncio(): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: await client.delete_instance(request=None) # Establish that the underlying stub method was called. @@ -11450,7 +13366,9 @@ async def test_delete_instance_empty_call_rest_asyncio(): @pytest.mark.asyncio async def test_reschedule_maintenance_empty_call_rest_asyncio(): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", @@ -11458,8 +13376,8 @@ async def test_reschedule_maintenance_empty_call_rest_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.reschedule_maintenance), - '__call__') as call: + type(client.transport.reschedule_maintenance), "__call__" + ) as call: await client.reschedule_maintenance(request=None) # Establish that the underlying stub method was called. 
@@ -11472,7 +13390,9 @@ async def test_reschedule_maintenance_empty_call_rest_asyncio(): def test_cloud_redis_rest_asyncio_lro_client(): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", @@ -11482,22 +13402,28 @@ def test_cloud_redis_rest_asyncio_lro_client(): # Ensure that we have an api-core operations client. assert isinstance( transport.operations_client, -operations_v1.AsyncOperationsRestClient, + operations_v1.AsyncOperationsRestClient, ) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client + def test_unsupported_parameter_rest_asyncio(): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." 
+ ) options = client_options.ClientOptions(quota_project_id="octopus") - with pytest.raises(core_exceptions.AsyncRestUnsupportedParameterError, match="google.api_core.client_options.ClientOptions.quota_project_id") as exc: # type: ignore + with pytest.raises( + core_exceptions.AsyncRestUnsupportedParameterError, + match="google.api_core.client_options.ClientOptions.quota_project_id", + ) as exc: # type: ignore client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", - client_options=options - ) + client_options=options, + ) def test_transport_grpc_default(): @@ -11510,18 +13436,21 @@ def test_transport_grpc_default(): transports.CloudRedisGrpcTransport, ) + def test_cloud_redis_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.CloudRedisTransport( credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" + credentials_file="credentials.json", ) def test_cloud_redis_base_transport(): # Instantiate the base transport. - with mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport.__init__') as Transport: + with mock.patch( + "google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport.__init__" + ) as Transport: Transport.return_value = None transport = transports.CloudRedisTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -11530,24 +13459,24 @@ def test_cloud_redis_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. 
methods = ( - 'list_instances', - 'get_instance', - 'get_instance_auth_string', - 'create_instance', - 'update_instance', - 'upgrade_instance', - 'import_instance', - 'export_instance', - 'failover_instance', - 'delete_instance', - 'reschedule_maintenance', - 'get_location', - 'list_locations', - 'get_operation', - 'wait_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', + "list_instances", + "get_instance", + "get_instance_auth_string", + "create_instance", + "update_instance", + "upgrade_instance", + "import_instance", + "export_instance", + "failover_instance", + "delete_instance", + "reschedule_maintenance", + "get_location", + "list_locations", + "get_operation", + "wait_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) for method in methods: with pytest.raises(NotImplementedError): @@ -11563,7 +13492,7 @@ def test_cloud_redis_base_transport(): # Catch all for all remaining methods and properties remainder = [ - 'kind', + "kind", ] for r in remainder: with pytest.raises(NotImplementedError): @@ -11572,25 +13501,36 @@ def test_cloud_redis_base_transport(): def test_cloud_redis_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages') as Transport: + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch( + "google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages" + ) as Transport, + ): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.CloudRedisTransport( credentials_file="credentials.json", quota_project_id="octopus", ) - load_creds.assert_called_once_with("credentials.json", + 
load_creds.assert_called_once_with( + "credentials.json", scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) def test_cloud_redis_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages') as Transport: + with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch( + "google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages" + ) as Transport, + ): Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.CloudRedisTransport() @@ -11599,14 +13539,12 @@ def test_cloud_redis_base_transport_with_adc(): def test_cloud_redis_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) CloudRedisClient() adc.assert_called_once_with( scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id=None, ) @@ -11621,12 +13559,12 @@ def test_cloud_redis_auth_adc(): def test_cloud_redis_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) @@ -11640,48 +13578,46 @@ def test_cloud_redis_transport_auth_adc(transport_class): ], ) def test_cloud_redis_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) adc.return_value = (gdch_mock, None) transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) + gdch_mock.with_gdch_audience.assert_called_once_with(e) @pytest.mark.parametrize( "transport_class,grpc_helpers", [ (transports.CloudRedisGrpcTransport, grpc_helpers), - (transports.CloudRedisGrpcAsyncIOTransport, grpc_helpers_async) + (transports.CloudRedisGrpcAsyncIOTransport, grpc_helpers_async), ], ) def test_cloud_redis_transport_create_channel(transport_class, grpc_helpers): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: + with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel, + ): creds = ga_credentials.AnonymousCredentials() adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) create_channel.assert_called_with( "redis.googleapis.com:443", credentials=creds, credentials_file=None, quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), scopes=["1", "2"], default_host="redis.googleapis.com", ssl_credentials=None, @@ -11692,10 +13628,11 @@ def test_cloud_redis_transport_create_channel(transport_class, grpc_helpers): ) -@pytest.mark.parametrize("transport_class", [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport]) -def test_cloud_redis_grpc_transport_client_cert_source_for_mtls( - transport_class -): +@pytest.mark.parametrize( + "transport_class", + [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport], +) +def test_cloud_redis_grpc_transport_client_cert_source_for_mtls(transport_class): cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. 
@@ -11704,7 +13641,7 @@ def test_cloud_redis_grpc_transport_client_cert_source_for_mtls( transport_class( host="squid.clam.whelk", credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds + ssl_channel_credentials=mock_ssl_channel_creds, ) mock_create_channel.assert_called_once_with( "squid.clam.whelk:443", @@ -11725,61 +13662,77 @@ def test_cloud_redis_grpc_transport_client_cert_source_for_mtls( with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: transport_class( credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback + client_cert_source_for_mtls=client_cert_source_callback, ) expected_cert, expected_key = client_cert_source_callback() mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key + certificate_chain=expected_cert, private_key=expected_key ) + def test_cloud_redis_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.CloudRedisRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.CloudRedisRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) def test_cloud_redis_host_no_port(transport_name): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='redis.googleapis.com'), - transport=transport_name, + 
client_options=client_options.ClientOptions( + api_endpoint="redis.googleapis.com" + ), + transport=transport_name, ) assert client.transport._host == ( - 'redis.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://redis.googleapis.com' + "redis.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://redis.googleapis.com" ) -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) def test_cloud_redis_host_with_port(transport_name): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='redis.googleapis.com:8000'), + client_options=client_options.ClientOptions( + api_endpoint="redis.googleapis.com:8000" + ), transport=transport_name, ) assert client.transport._host == ( - 'redis.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://redis.googleapis.com:8000' + "redis.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://redis.googleapis.com:8000" ) -@pytest.mark.parametrize("transport_name", [ - "rest", -]) + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) def test_cloud_redis_client_transport_session_collision(transport_name): creds1 = ga_credentials.AnonymousCredentials() creds2 = ga_credentials.AnonymousCredentials() @@ -11824,8 +13777,10 @@ def test_cloud_redis_client_transport_session_collision(transport_name): session1 = client1.transport.reschedule_maintenance._session session2 = client2.transport.reschedule_maintenance._session assert session1 != session2 + + def test_cloud_redis_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if 
provided. transport = transports.CloudRedisGrpcTransport( @@ -11838,7 +13793,7 @@ def test_cloud_redis_grpc_transport_channel(): def test_cloud_redis_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.CloudRedisGrpcAsyncIOTransport( @@ -11853,12 +13808,17 @@ def test_cloud_redis_grpc_asyncio_transport_channel(): # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. @pytest.mark.filterwarnings("ignore::FutureWarning") -@pytest.mark.parametrize("transport_class", [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport]) -def test_cloud_redis_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: +@pytest.mark.parametrize( + "transport_class", + [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport], +) +def test_cloud_redis_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -11867,7 +13827,7 @@ def test_cloud_redis_transport_channel_mtls_with_client_cert_source( cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ 
-11897,17 +13857,20 @@ def test_cloud_redis_transport_channel_mtls_with_client_cert_source( # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport]) -def test_cloud_redis_transport_channel_mtls_with_adc( - transport_class -): +@pytest.mark.parametrize( + "transport_class", + [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport], +) +def test_cloud_redis_transport_channel_mtls_with_adc(transport_class): mock_ssl_cred = mock.Mock() with mock.patch.multiple( "google.auth.transport.grpc.SslCredentials", __init__=mock.Mock(return_value=None), ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel mock_cred = mock.Mock() @@ -11938,7 +13901,7 @@ def test_cloud_redis_transport_channel_mtls_with_adc( def test_cloud_redis_grpc_lro_client(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) transport = client.transport @@ -11955,7 +13918,7 @@ def test_cloud_redis_grpc_lro_client(): def test_cloud_redis_grpc_lro_async_client(): client = CloudRedisAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', + transport="grpc_asyncio", ) transport = client.transport @@ -11973,7 +13936,11 @@ def test_instance_path(): project = "squid" location = "clam" instance = "whelk" - expected = "projects/{project}/locations/{location}/instances/{instance}".format(project=project, location=location, instance=instance, ) + expected = 
"projects/{project}/locations/{location}/instances/{instance}".format( + project=project, + location=location, + instance=instance, + ) actual = CloudRedisClient.instance_path(project, location, instance) assert expected == actual @@ -11990,9 +13957,12 @@ def test_parse_instance_path(): actual = CloudRedisClient.parse_instance_path(path) assert expected == actual + def test_common_billing_account_path(): billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) actual = CloudRedisClient.common_billing_account_path(billing_account) assert expected == actual @@ -12007,9 +13977,12 @@ def test_parse_common_billing_account_path(): actual = CloudRedisClient.parse_common_billing_account_path(path) assert expected == actual + def test_common_folder_path(): folder = "winkle" - expected = "folders/{folder}".format(folder=folder, ) + expected = "folders/{folder}".format( + folder=folder, + ) actual = CloudRedisClient.common_folder_path(folder) assert expected == actual @@ -12024,9 +13997,12 @@ def test_parse_common_folder_path(): actual = CloudRedisClient.parse_common_folder_path(path) assert expected == actual + def test_common_organization_path(): organization = "scallop" - expected = "organizations/{organization}".format(organization=organization, ) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = CloudRedisClient.common_organization_path(organization) assert expected == actual @@ -12041,9 +14017,12 @@ def test_parse_common_organization_path(): actual = CloudRedisClient.parse_common_organization_path(path) assert expected == actual + def test_common_project_path(): project = "squid" - expected = "projects/{project}".format(project=project, ) + expected = "projects/{project}".format( + project=project, + ) actual = CloudRedisClient.common_project_path(project) assert 
expected == actual @@ -12058,10 +14037,14 @@ def test_parse_common_project_path(): actual = CloudRedisClient.parse_common_project_path(path) assert expected == actual + def test_common_location_path(): project = "whelk" location = "octopus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) actual = CloudRedisClient.common_location_path(project, location) assert expected == actual @@ -12081,14 +14064,18 @@ def test_parse_common_location_path(): def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() - with mock.patch.object(transports.CloudRedisTransport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.CloudRedisTransport, "_prep_wrapped_messages" + ) as prep: client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) - with mock.patch.object(transports.CloudRedisTransport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.CloudRedisTransport, "_prep_wrapped_messages" + ) as prep: transport_class = CloudRedisClient.get_transport_class() transport = transport_class( credentials=ga_credentials.AnonymousCredentials(), @@ -12099,7 +14086,8 @@ def test_client_with_default_client_info(): def test_delete_operation(transport: str = "grpc"): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12119,10 +14107,12 @@ def test_delete_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. 
assert response is None + @pytest.mark.asyncio async def test_delete_operation_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12132,9 +14122,7 @@ async def test_delete_operation_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.delete_operation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12157,7 +14145,7 @@ def test_delete_operation_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None + call.return_value = None client.delete_operation(request) # Establish that the underlying gRPC stub method was called. @@ -12167,7 +14155,11 @@ def test_delete_operation_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_delete_operation_field_headers_async(): @@ -12182,9 +14174,7 @@ async def test_delete_operation_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_operation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12193,7 +14183,10 @@ async def test_delete_operation_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_delete_operation_from_dict(): @@ -12212,6 +14205,7 @@ def test_delete_operation_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_delete_operation_from_dict_async(): client = CloudRedisAsyncClient( @@ -12220,9 +14214,7 @@ async def test_delete_operation_from_dict_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.delete_operation( request={ "name": "locations", @@ -12246,6 +14238,7 @@ def test_delete_operation_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.DeleteOperationRequest() + @pytest.mark.asyncio async def test_delete_operation_flattened_async(): client = CloudRedisAsyncClient( @@ -12254,9 +14247,7 @@ async def test_delete_operation_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_operation() # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12266,7 +14257,8 @@ async def test_delete_operation_flattened_async(): def test_cancel_operation(transport: str = "grpc"): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12286,10 +14278,12 @@ def test_cancel_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. assert response is None + @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12299,9 +14293,7 @@ async def test_cancel_operation_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12324,7 +14316,7 @@ def test_cancel_operation_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None + call.return_value = None client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. @@ -12334,7 +14326,11 @@ def test_cancel_operation_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): @@ -12349,9 +14345,7 @@ async def test_cancel_operation_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12360,7 +14354,10 @@ async def test_cancel_operation_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_cancel_operation_from_dict(): @@ -12379,6 +14376,7 @@ def test_cancel_operation_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = CloudRedisAsyncClient( @@ -12387,9 +14385,7 @@ async def test_cancel_operation_from_dict_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.cancel_operation( request={ "name": "locations", @@ -12413,6 +14409,7 @@ def test_cancel_operation_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.CancelOperationRequest() + @pytest.mark.asyncio async def test_cancel_operation_flattened_async(): client = CloudRedisAsyncClient( @@ -12421,9 +14418,7 @@ async def test_cancel_operation_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.cancel_operation() # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12433,7 +14428,8 @@ async def test_cancel_operation_flattened_async(): def test_wait_operation(transport: str = "grpc"): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12453,10 +14449,12 @@ def test_wait_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. 
assert isinstance(response, operations_pb2.Operation) + @pytest.mark.asyncio async def test_wait_operation(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12501,7 +14499,11 @@ def test_wait_operation_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_wait_operation_field_headers_async(): @@ -12527,7 +14529,10 @@ async def test_wait_operation_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_wait_operation_from_dict(): @@ -12546,6 +14551,7 @@ def test_wait_operation_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_wait_operation_from_dict_async(): client = CloudRedisAsyncClient( @@ -12580,6 +14586,7 @@ def test_wait_operation_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.WaitOperationRequest() + @pytest.mark.asyncio async def test_wait_operation_flattened_async(): client = CloudRedisAsyncClient( @@ -12600,7 +14607,8 @@ async def test_wait_operation_flattened_async(): def test_get_operation(transport: str = "grpc"): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12620,10 +14628,12 @@ def test_get_operation(transport: str = "grpc"): # Establish that the 
response is the type that we expect. assert isinstance(response, operations_pb2.Operation) + @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12668,7 +14678,11 @@ def test_get_operation_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_get_operation_field_headers_async(): @@ -12694,7 +14708,10 @@ async def test_get_operation_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_get_operation_from_dict(): @@ -12713,6 +14730,7 @@ def test_get_operation_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = CloudRedisAsyncClient( @@ -12747,6 +14765,7 @@ def test_get_operation_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.GetOperationRequest() + @pytest.mark.asyncio async def test_get_operation_flattened_async(): client = CloudRedisAsyncClient( @@ -12767,7 +14786,8 @@ async def test_get_operation_flattened_async(): def test_list_operations(transport: str = "grpc"): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12787,10 +14807,12 @@ def test_list_operations(transport: 
str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.ListOperationsResponse) + @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12835,7 +14857,11 @@ def test_list_operations_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_list_operations_field_headers_async(): @@ -12861,7 +14887,10 @@ async def test_list_operations_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_list_operations_from_dict(): @@ -12880,6 +14909,7 @@ def test_list_operations_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = CloudRedisAsyncClient( @@ -12914,6 +14944,7 @@ def test_list_operations_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.ListOperationsRequest() + @pytest.mark.asyncio async def test_list_operations_flattened_async(): client = CloudRedisAsyncClient( @@ -12934,7 +14965,8 @@ async def test_list_operations_flattened_async(): def test_list_locations(transport: str = "grpc"): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is 
concerned, @@ -12954,10 +14986,12 @@ def test_list_locations(transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.ListLocationsResponse) + @pytest.mark.asyncio async def test_list_locations_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13002,7 +15036,11 @@ def test_list_locations_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_list_locations_field_headers_async(): @@ -13028,7 +15066,10 @@ async def test_list_locations_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_list_locations_from_dict(): @@ -13047,6 +15088,7 @@ def test_list_locations_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_list_locations_from_dict_async(): client = CloudRedisAsyncClient( @@ -13081,6 +15123,7 @@ def test_list_locations_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == locations_pb2.ListLocationsRequest() + @pytest.mark.asyncio async def test_list_locations_flattened_async(): client = CloudRedisAsyncClient( @@ -13101,7 +15144,8 @@ async def test_list_locations_flattened_async(): def test_get_location(transport: str = "grpc"): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13121,10 +15165,12 @@ def test_get_location(transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.Location) + @pytest.mark.asyncio async def test_get_location_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13148,8 +15194,7 @@ async def test_get_location_async(transport: str = "grpc_asyncio"): def test_get_location_field_headers(): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials()) + client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials()) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
@@ -13168,13 +15213,15 @@ def test_get_location_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_get_location_field_headers_async(): - client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials() - ) + client = CloudRedisAsyncClient(credentials=async_anonymous_credentials()) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -13194,7 +15241,10 @@ async def test_get_location_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] def test_get_location_from_dict(): @@ -13213,6 +15263,7 @@ def test_get_location_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_get_location_from_dict_async(): client = CloudRedisAsyncClient( @@ -13247,6 +15298,7 @@ def test_get_location_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == locations_pb2.GetLocationRequest() + @pytest.mark.asyncio async def test_get_location_flattened_async(): client = CloudRedisAsyncClient( @@ -13267,10 +15319,11 @@ async def test_get_location_flattened_async(): def test_transport_close_grpc(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: with client: close.assert_not_called() close.assert_called_once() @@ -13279,10 +15332,11 @@ def 
test_transport_close_grpc(): @pytest.mark.asyncio async def test_transport_close_grpc_asyncio(): client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" + credentials=async_anonymous_credentials(), transport="grpc_asyncio" ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: async with client: close.assert_not_called() close.assert_called_once() @@ -13290,10 +15344,11 @@ async def test_transport_close_grpc_asyncio(): def test_transport_close_rest(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: with client: close.assert_not_called() close.assert_called_once() @@ -13302,12 +15357,15 @@ def test_transport_close_rest(): @pytest.mark.asyncio async def test_transport_close_rest_asyncio(): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." 
+ ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: async with client: close.assert_not_called() close.assert_called_once() @@ -13315,13 +15373,12 @@ async def test_transport_close_rest_asyncio(): def test_client_ctx(): transports = [ - 'rest', - 'grpc', + "rest", + "grpc", ] for transport in transports: client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. with mock.patch.object(type(client.transport), "close") as close: @@ -13330,10 +15387,14 @@ def test_client_ctx(): pass close.assert_called() -@pytest.mark.parametrize("client_class,transport_class", [ - (CloudRedisClient, transports.CloudRedisGrpcTransport), - (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport), -]) + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (CloudRedisClient, transports.CloudRedisGrpcTransport), + (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport), + ], +) def test_api_key_credentials(client_class, transport_class): with mock.patch.object( google.auth._default, "get_api_key_credentials", create=True @@ -13348,7 +15409,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git 
a/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/conf.py index b7a27a9b49cf..6e9b715b80c6 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/conf.py @@ -28,7 +28,6 @@ import os import shlex import sys -import logging from typing import Any # If extensions (or modules to document with autodoc) are in another directory, @@ -83,9 +82,9 @@ root_doc = "index" # General information about the project. -project = u"google-cloud-redis" -copyright = u"2025, Google, LLC" -author = u"Google APIs" +project = "google-cloud-redis" +copyright = "2025, Google, LLC" +author = "Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -285,7 +284,7 @@ ( root_doc, "google-cloud-redis.tex", - u"google-cloud-redis Documentation", + "google-cloud-redis Documentation", author, "manual", ) @@ -386,6 +385,7 @@ napoleon_use_param = True napoleon_use_rtype = True + # Setup for sphinx behaviors such as warning filters. class UnexpectedUnindentFilter(logging.Filter): """Filter out warnings about unexpected unindentation following bullet lists.""" @@ -413,5 +413,5 @@ def setup(app: Any) -> None: """ # Sphinx's logger is hierarchical. Adding a filter to the # root 'sphinx' logger will catch warnings from all sub-loggers. 
- logger = logging.getLogger('sphinx') + logger = logging.getLogger("sphinx") logger.addFilter(UnexpectedUnindentFilter()) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis/__init__.py index 045bcae4c55c..eb15257505d6 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis/__init__.py @@ -18,38 +18,42 @@ __version__ = package_version.__version__ +from google.cloud.redis_v1.services.cloud_redis.async_client import ( + CloudRedisAsyncClient, +) from google.cloud.redis_v1.services.cloud_redis.client import CloudRedisClient -from google.cloud.redis_v1.services.cloud_redis.async_client import CloudRedisAsyncClient - -from google.cloud.redis_v1.types.cloud_redis import CreateInstanceRequest -from google.cloud.redis_v1.types.cloud_redis import DeleteInstanceRequest -from google.cloud.redis_v1.types.cloud_redis import GetInstanceRequest -from google.cloud.redis_v1.types.cloud_redis import Instance -from google.cloud.redis_v1.types.cloud_redis import ListInstancesRequest -from google.cloud.redis_v1.types.cloud_redis import ListInstancesResponse -from google.cloud.redis_v1.types.cloud_redis import MaintenancePolicy -from google.cloud.redis_v1.types.cloud_redis import MaintenanceSchedule -from google.cloud.redis_v1.types.cloud_redis import NodeInfo -from google.cloud.redis_v1.types.cloud_redis import OperationMetadata -from google.cloud.redis_v1.types.cloud_redis import PersistenceConfig -from google.cloud.redis_v1.types.cloud_redis import TlsCertificate -from google.cloud.redis_v1.types.cloud_redis import UpdateInstanceRequest -from google.cloud.redis_v1.types.cloud_redis import WeeklyMaintenanceWindow +from google.cloud.redis_v1.types.cloud_redis import ( + CreateInstanceRequest, + 
DeleteInstanceRequest, + GetInstanceRequest, + Instance, + ListInstancesRequest, + ListInstancesResponse, + MaintenancePolicy, + MaintenanceSchedule, + NodeInfo, + OperationMetadata, + PersistenceConfig, + TlsCertificate, + UpdateInstanceRequest, + WeeklyMaintenanceWindow, +) -__all__ = ('CloudRedisClient', - 'CloudRedisAsyncClient', - 'CreateInstanceRequest', - 'DeleteInstanceRequest', - 'GetInstanceRequest', - 'Instance', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'MaintenancePolicy', - 'MaintenanceSchedule', - 'NodeInfo', - 'OperationMetadata', - 'PersistenceConfig', - 'TlsCertificate', - 'UpdateInstanceRequest', - 'WeeklyMaintenanceWindow', +__all__ = ( + "CloudRedisClient", + "CloudRedisAsyncClient", + "CreateInstanceRequest", + "DeleteInstanceRequest", + "GetInstanceRequest", + "Instance", + "ListInstancesRequest", + "ListInstancesResponse", + "MaintenancePolicy", + "MaintenanceSchedule", + "NodeInfo", + "OperationMetadata", + "PersistenceConfig", + "TlsCertificate", + "UpdateInstanceRequest", + "WeeklyMaintenanceWindow", ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/__init__.py index 1f7bad3796c9..5cffab29c331 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/__init__.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.cloud.redis_v1 import gapic_version as package_version +import sys import google.api_core as api_core -import sys +from google.cloud.redis_v1 import gapic_version as package_version __version__ = package_version.__version__ @@ -28,51 +28,57 @@ import importlib_metadata as metadata -from .services.cloud_redis import CloudRedisClient -from .services.cloud_redis import CloudRedisAsyncClient - -from .types.cloud_redis import CreateInstanceRequest -from .types.cloud_redis import DeleteInstanceRequest -from .types.cloud_redis import GetInstanceRequest -from .types.cloud_redis import Instance -from .types.cloud_redis import ListInstancesRequest -from .types.cloud_redis import ListInstancesResponse -from .types.cloud_redis import MaintenancePolicy -from .types.cloud_redis import MaintenanceSchedule -from .types.cloud_redis import NodeInfo -from .types.cloud_redis import OperationMetadata -from .types.cloud_redis import PersistenceConfig -from .types.cloud_redis import TlsCertificate -from .types.cloud_redis import UpdateInstanceRequest -from .types.cloud_redis import WeeklyMaintenanceWindow +from .services.cloud_redis import CloudRedisAsyncClient, CloudRedisClient +from .types.cloud_redis import ( + CreateInstanceRequest, + DeleteInstanceRequest, + GetInstanceRequest, + Instance, + ListInstancesRequest, + ListInstancesResponse, + MaintenancePolicy, + MaintenanceSchedule, + NodeInfo, + OperationMetadata, + PersistenceConfig, + TlsCertificate, + UpdateInstanceRequest, + WeeklyMaintenanceWindow, +) -if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER - api_core.check_python_version("google.cloud.redis_v1") # type: ignore - api_core.check_dependency_versions("google.cloud.redis_v1") # type: ignore -else: # pragma: NO COVER +if hasattr(api_core, "check_python_version") and hasattr( + api_core, "check_dependency_versions" +): # pragma: NO COVER + api_core.check_python_version("google.cloud.redis_v1") 
# type: ignore + api_core.check_dependency_versions("google.cloud.redis_v1") # type: ignore +else: # pragma: NO COVER # An older version of api_core is installed which does not define the # functions above. We do equivalent checks manually. try: - import warnings import sys + import warnings _py_version_str = sys.version.split()[0] _package_label = "google.cloud.redis_v1" if sys.version_info < (3, 9): - warnings.warn("You are using a non-supported Python version " + - f"({_py_version_str}). Google will not post any further " + - f"updates to {_package_label} supporting this Python version. " + - "Please upgrade to the latest Python version, or at " + - f"least to Python 3.9, and then update {_package_label}.", - FutureWarning) + warnings.warn( + "You are using a non-supported Python version " + + f"({_py_version_str}). Google will not post any further " + + f"updates to {_package_label} supporting this Python version. " + + "Please upgrade to the latest Python version, or at " + + f"least to Python 3.9, and then update {_package_label}.", + FutureWarning, + ) if sys.version_info[:2] == (3, 9): - warnings.warn(f"You are using a Python version ({_py_version_str}) " + - f"which Google will stop supporting in {_package_label} in " + - "January 2026. Please " + - "upgrade to the latest Python version, or at " + - "least to Python 3.10, before then, and " + - f"then update {_package_label}.", - FutureWarning) + warnings.warn( + f"You are using a Python version ({_py_version_str}) " + + f"which Google will stop supporting in {_package_label} in " + + "January 2026. Please " + + "upgrade to the latest Python version, or at " + + "least to Python 3.10, before then, and " + + f"then update {_package_label}.", + FutureWarning, + ) def parse_version_to_tuple(version_string: str): """Safely converts a semantic version string to a comparable tuple of integers. 
@@ -110,41 +116,45 @@ def _get_version(dependency_name): _recommendation = " (we recommend 6.x)" (_version_used, _version_used_string) = _get_version(_dependency_package) if _version_used and _version_used < _next_supported_version_tuple: - warnings.warn(f"Package {_package_label} depends on " + - f"{_dependency_package}, currently installed at version " + - f"{_version_used_string}. Future updates to " + - f"{_package_label} will require {_dependency_package} at " + - f"version {_next_supported_version} or higher{_recommendation}." + - " Please ensure " + - "that either (a) your Python environment doesn't pin the " + - f"version of {_dependency_package}, so that updates to " + - f"{_package_label} can require the higher version, or " + - "(b) you manually update your Python environment to use at " + - f"least version {_next_supported_version} of " + - f"{_dependency_package}.", - FutureWarning) + warnings.warn( + f"Package {_package_label} depends on " + + f"{_dependency_package}, currently installed at version " + + f"{_version_used_string}. Future updates to " + + f"{_package_label} will require {_dependency_package} at " + + f"version {_next_supported_version} or higher{_recommendation}." + + " Please ensure " + + "that either (a) your Python environment doesn't pin the " + + f"version of {_dependency_package}, so that updates to " + + f"{_package_label} can require the higher version, or " + + "(b) you manually update your Python environment to use at " + + f"least version {_next_supported_version} of " + + f"{_dependency_package}.", + FutureWarning, + ) except Exception: - warnings.warn("Could not determine the version of Python " + - "currently being used. To continue receiving " + - "updates for {_package_label}, ensure you are " + - "using a supported version of Python; see " + - "https://devguide.python.org/versions/") + warnings.warn( + "Could not determine the version of Python " + + "currently being used. 
To continue receiving " + + "updates for {_package_label}, ensure you are " + + "using a supported version of Python; see " + + "https://devguide.python.org/versions/" + ) __all__ = ( - 'CloudRedisAsyncClient', -'CloudRedisClient', -'CreateInstanceRequest', -'DeleteInstanceRequest', -'GetInstanceRequest', -'Instance', -'ListInstancesRequest', -'ListInstancesResponse', -'MaintenancePolicy', -'MaintenanceSchedule', -'NodeInfo', -'OperationMetadata', -'PersistenceConfig', -'TlsCertificate', -'UpdateInstanceRequest', -'WeeklyMaintenanceWindow', + "CloudRedisAsyncClient", + "CloudRedisClient", + "CreateInstanceRequest", + "DeleteInstanceRequest", + "GetInstanceRequest", + "Instance", + "ListInstancesRequest", + "ListInstancesResponse", + "MaintenancePolicy", + "MaintenanceSchedule", + "NodeInfo", + "OperationMetadata", + "PersistenceConfig", + "TlsCertificate", + "UpdateInstanceRequest", + "WeeklyMaintenanceWindow", ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/__init__.py index f302df64f13f..b2a40b86386e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/__init__.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from .client import CloudRedisClient from .async_client import CloudRedisAsyncClient +from .client import CloudRedisClient __all__ = ( - 'CloudRedisClient', - 'CloudRedisAsyncClient', + "CloudRedisClient", + "CloudRedisAsyncClient", ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py index aca792958d89..c31863e1a5dd 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -14,47 +14,59 @@ # limitations under the License. # import logging as std_logging -from collections import OrderedDict import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.redis_v1 import gapic_version as package_version +from collections import OrderedDict +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) -from google.api_core.client_options import ClientOptions +import google.protobuf from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.redis_v1 import gapic_version as package_version +from google.oauth2 import service_account # type: ignore try: OptionalRetry = Union[retries.AsyncRetry, 
gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.cloud.redis_v1.services.cloud_redis import pagers -from google.cloud.redis_v1.types import cloud_redis -from google.longrunning import operations_pb2 # type: ignore import google.api_core.operation as operation # type: ignore import google.api_core.operation_async as operation_async # type: ignore import google.protobuf.empty_pb2 as empty_pb2 # type: ignore import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore -from .transports.base import CloudRedisTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import CloudRedisGrpcAsyncIOTransport +from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.redis_v1.services.cloud_redis import pagers +from google.cloud.redis_v1.types import cloud_redis +from google.longrunning import operations_pb2 # type: ignore + from .client import CloudRedisClient +from .transports.base import DEFAULT_CLIENT_INFO, CloudRedisTransport +from .transports.grpc_asyncio import CloudRedisGrpcAsyncIOTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) + class CloudRedisAsyncClient: """Configures and manages Cloud Memorystore for Redis instances @@ -90,16 +102,24 @@ class CloudRedisAsyncClient: instance_path = staticmethod(CloudRedisClient.instance_path) parse_instance_path = staticmethod(CloudRedisClient.parse_instance_path) - common_billing_account_path = staticmethod(CloudRedisClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(CloudRedisClient.parse_common_billing_account_path) + 
common_billing_account_path = staticmethod( + CloudRedisClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + CloudRedisClient.parse_common_billing_account_path + ) common_folder_path = staticmethod(CloudRedisClient.common_folder_path) parse_common_folder_path = staticmethod(CloudRedisClient.parse_common_folder_path) common_organization_path = staticmethod(CloudRedisClient.common_organization_path) - parse_common_organization_path = staticmethod(CloudRedisClient.parse_common_organization_path) + parse_common_organization_path = staticmethod( + CloudRedisClient.parse_common_organization_path + ) common_project_path = staticmethod(CloudRedisClient.common_project_path) parse_common_project_path = staticmethod(CloudRedisClient.parse_common_project_path) common_location_path = staticmethod(CloudRedisClient.common_location_path) - parse_common_location_path = staticmethod(CloudRedisClient.parse_common_location_path) + parse_common_location_path = staticmethod( + CloudRedisClient.parse_common_location_path + ) @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): @@ -141,7 +161,9 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): """Return the API endpoint and client cert source for mutual TLS. 
The client cert source is determined in the following order: @@ -204,12 +226,16 @@ def universe_domain(self) -> str: get_transport_class = CloudRedisClient.get_transport_class - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, CloudRedisTransport, Callable[..., CloudRedisTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, CloudRedisTransport, Callable[..., CloudRedisTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the cloud redis async client. Args: @@ -267,31 +293,39 @@ def __init__(self, *, transport=transport, client_options=client_options, client_info=client_info, - ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.redis_v1.CloudRedisAsyncClient`.", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + 
else { "serviceName": "google.cloud.redis.v1.CloudRedis", "credentialsType": None, - } + }, ) - async def list_instances(self, - request: Optional[Union[cloud_redis.ListInstancesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListInstancesAsyncPager: + async def list_instances( + self, + request: Optional[Union[cloud_redis.ListInstancesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListInstancesAsyncPager: r"""Lists all Redis instances owned by a project in either the specified location (region) or all locations. @@ -364,10 +398,14 @@ async def sample_list_instances(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -381,14 +419,14 @@ async def sample_list_instances(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.list_instances] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_instances + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -416,14 +454,15 @@ async def sample_list_instances(): # Done; return the response. return response - async def get_instance(self, - request: Optional[Union[cloud_redis.GetInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> cloud_redis.Instance: + async def get_instance( + self, + request: Optional[Union[cloud_redis.GetInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.Instance: r"""Gets the details of a specific Redis instance. .. code-block:: python @@ -480,10 +519,14 @@ async def sample_get_instance(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -497,14 +540,14 @@ async def sample_get_instance(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_instance] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_instance + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -521,16 +564,17 @@ async def sample_get_instance(): # Done; return the response. 
return response - async def create_instance(self, - request: Optional[Union[cloud_redis.CreateInstanceRequest, dict]] = None, - *, - parent: Optional[str] = None, - instance_id: Optional[str] = None, - instance: Optional[cloud_redis.Instance] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def create_instance( + self, + request: Optional[Union[cloud_redis.CreateInstanceRequest, dict]] = None, + *, + parent: Optional[str] = None, + instance_id: Optional[str] = None, + instance: Optional[cloud_redis.Instance] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Creates a Redis instance based on the specified tier and memory size. @@ -636,10 +680,14 @@ async def sample_create_instance(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent, instance_id, instance] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -657,14 +705,14 @@ async def sample_create_instance(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.create_instance] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_instance + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -689,15 +737,16 @@ async def sample_create_instance(): # Done; return the response. return response - async def update_instance(self, - request: Optional[Union[cloud_redis.UpdateInstanceRequest, dict]] = None, - *, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - instance: Optional[cloud_redis.Instance] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def update_instance( + self, + request: Optional[Union[cloud_redis.UpdateInstanceRequest, dict]] = None, + *, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + instance: Optional[cloud_redis.Instance] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Updates the metadata and configuration of a specific Redis instance. Completed longrunning.Operation will contain the new @@ -787,10 +836,14 @@ async def sample_update_instance(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [update_mask, instance] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -806,14 +859,16 @@ async def sample_update_instance(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_instance] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_instance + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("instance.name", request.instance.name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("instance.name", request.instance.name),) + ), ) # Validate the universe domain. @@ -838,14 +893,15 @@ async def sample_update_instance(): # Done; return the response. 
return response - async def delete_instance(self, - request: Optional[Union[cloud_redis.DeleteInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def delete_instance( + self, + request: Optional[Union[cloud_redis.DeleteInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Deletes a specific Redis instance. Instance stops serving and data is deleted. @@ -919,10 +975,14 @@ async def sample_delete_instance(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -936,14 +996,14 @@ async def sample_delete_instance(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.delete_instance] + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_instance + ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1010,8 +1070,7 @@ async def list_operations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -1019,7 +1078,11 @@ async def list_operations( # Send the request. response = await rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1066,8 +1129,7 @@ async def get_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -1075,7 +1137,11 @@ async def get_operation( # Send the request. response = await rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1126,15 +1192,19 @@ async def delete_operation( # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - await rpc(request_pb, retry=retry, timeout=timeout, metadata=metadata,) + await rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) async def cancel_operation( self, @@ -1181,15 +1251,19 @@ async def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - await rpc(request_pb, retry=retry, timeout=timeout, metadata=metadata,) + await rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) async def wait_operation( self, @@ -1239,8 +1313,7 @@ async def wait_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -1248,7 +1321,11 @@ async def wait_operation( # Send the request. response = await rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1295,8 +1372,7 @@ async def get_location( # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -1304,7 +1380,11 @@ async def get_location( # Send the request. response = await rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1351,8 +1431,7 @@ async def list_locations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -1360,7 +1439,11 @@ async def list_locations( # Send the request. response = await rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -1371,12 +1454,13 @@ async def __aenter__(self) -> "CloudRedisAsyncClient": async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "CloudRedisAsyncClient", -) +__all__ = ("CloudRedisAsyncClient",) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py index efaccae55c6f..483937d27b16 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py @@ -13,27 +13,38 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from collections import OrderedDict -from http import HTTPStatus import json import logging as std_logging import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast import warnings +from collections import OrderedDict +from http import HTTPStatus +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) -from google.cloud.redis_v1 import gapic_version as package_version - +import google.protobuf from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.redis_v1 import gapic_version as package_version +from google.oauth2 import service_account # type: ignore try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -42,29 +53,33 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) -from google.cloud.location import locations_pb2 # type: ignore -from google.cloud.redis_v1.services.cloud_redis import pagers -from 
google.cloud.redis_v1.types import cloud_redis -from google.longrunning import operations_pb2 # type: ignore import google.api_core.operation as operation # type: ignore import google.api_core.operation_async as operation_async # type: ignore import google.protobuf.empty_pb2 as empty_pb2 # type: ignore import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore -from .transports.base import CloudRedisTransport, DEFAULT_CLIENT_INFO +from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.redis_v1.services.cloud_redis import pagers +from google.cloud.redis_v1.types import cloud_redis +from google.longrunning import operations_pb2 # type: ignore + +from .transports.base import DEFAULT_CLIENT_INFO, CloudRedisTransport from .transports.grpc import CloudRedisGrpcTransport from .transports.grpc_asyncio import CloudRedisGrpcAsyncIOTransport from .transports.rest import CloudRedisRestTransport + try: from .transports.rest_asyncio import AsyncCloudRedisRestTransport + HAS_ASYNC_REST_DEPENDENCIES = True -except ImportError as e: # pragma: NO COVER +except ImportError as e: # pragma: NO COVER HAS_ASYNC_REST_DEPENDENCIES = False ASYNC_REST_EXCEPTION = e @@ -76,6 +91,7 @@ class CloudRedisClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. 
""" + _transport_registry = OrderedDict() # type: Dict[str, Type[CloudRedisTransport]] _transport_registry["grpc"] = CloudRedisGrpcTransport _transport_registry["grpc_asyncio"] = CloudRedisGrpcAsyncIOTransport @@ -83,9 +99,10 @@ class CloudRedisClientMeta(type): if HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER _transport_registry["rest_asyncio"] = AsyncCloudRedisRestTransport - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[CloudRedisTransport]: + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[CloudRedisTransport]: """Returns an appropriate transport class. Args: @@ -96,7 +113,9 @@ def get_transport_class(cls, The transport class to use. """ # If a specific transport is requested, return that one. - if label == "rest_asyncio" and not HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER + if ( + label == "rest_asyncio" and not HAS_ASYNC_REST_DEPENDENCIES + ): # pragma: NO COVER raise ASYNC_REST_EXCEPTION if label: return cls._transport_registry[label] @@ -184,14 +203,16 @@ def _use_client_cert_effective(): bool: whether client certificate should be used for mTLS Raises: ValueError: (If using a version of google-auth without should_use_client_cert and - GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) 
""" # check if google-auth version supports should_use_client_cert for automatic mTLS enablement if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER return mtls.should_use_client_cert() - else: # pragma: NO COVER + else: # pragma: NO COVER # if unsupported, fallback to reading from env var - use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert_str = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() if use_client_cert_str not in ("true", "false"): raise ValueError( "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" @@ -230,8 +251,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: CloudRedisClient: The constructed client. """ - credentials = service_account.Credentials.from_service_account_file( - filename) + credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials return cls(*args, **kwargs) @@ -248,73 +268,108 @@ def transport(self) -> CloudRedisTransport: return self._transport @staticmethod - def instance_path(project: str,location: str,instance: str,) -> str: + def instance_path( + project: str, + location: str, + instance: str, + ) -> str: """Returns a fully-qualified instance string.""" - return "projects/{project}/locations/{location}/instances/{instance}".format(project=project, location=location, instance=instance, ) + return "projects/{project}/locations/{location}/instances/{instance}".format( + project=project, + location=location, + instance=instance, + ) @staticmethod - def parse_instance_path(path: str) -> Dict[str,str]: + def parse_instance_path(path: str) -> Dict[str, str]: """Parses a instance path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)$", path) + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)$", + path, + ) return m.groupdict() if m else {} @staticmethod - def 
common_billing_account_path(billing_account: str, ) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: + def parse_common_billing_account_path(path: str) -> Dict[str, str]: """Parse a billing_account path into its component segments.""" m = re.match(r"^billingAccounts/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str, ) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: + def parse_common_folder_path(path: str) -> Dict[str, str]: """Parse a folder path into its component segments.""" m = re.match(r"^folders/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str, ) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: + def parse_common_organization_path(path: str) -> Dict[str, str]: """Parse a organization path into its component segments.""" m = re.match(r"^organizations/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str, ) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project 
string.""" - return "projects/{project}".format(project=project, ) + return "projects/{project}".format( + project=project, + ) @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: + def parse_common_project_path(path: str) -> Dict[str, str]: """Parse a project path into its component segments.""" m = re.match(r"^projects/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str, ) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: + def parse_common_location_path(path: str) -> Dict[str, str]: """Parse a location path into its component segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -346,14 +401,18 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = CloudRedisClient._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Figure out the client cert source to use. client_cert_source = None @@ -366,7 +425,9 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio # Figure out which api endpoint to use. if client_options.api_endpoint is not None: api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): api_endpoint = cls.DEFAULT_MTLS_ENDPOINT else: api_endpoint = cls.DEFAULT_ENDPOINT @@ -391,7 +452,9 @@ def _read_environment_variables(): use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) return use_client_cert, use_mtls_endpoint, universe_domain_env @staticmethod @@ -414,7 +477,9 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): return client_cert_source @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint) -> str: + def _get_api_endpoint( + 
api_override, client_cert_source, universe_domain, use_mtls_endpoint + ) -> str: """Return the API endpoint used by the client. Args: @@ -430,17 +495,25 @@ def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtl """ if api_override is not None: api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): _default_universe = CloudRedisClient._DEFAULT_UNIVERSE if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) api_endpoint = CloudRedisClient.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + api_endpoint = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) return api_endpoint @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: """Return the universe domain used by the client. Args: @@ -476,15 +549,18 @@ def _validate_universe_domain(self): return True def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError + self, error: core_exceptions.GoogleAPICallError ) -> None: """Adds credential info string to error details for 401/403/404 errors. Args: error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
""" - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: return cred = self._transport._credentials @@ -517,12 +593,16 @@ def universe_domain(self) -> str: """ return self._universe_domain - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, CloudRedisTransport, Callable[..., CloudRedisTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, CloudRedisTransport, Callable[..., CloudRedisTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the cloud redis client. 
Args: @@ -580,13 +660,21 @@ def __init__(self, *, self._client_options = client_options_lib.from_dict(self._client_options) if self._client_options is None: self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = CloudRedisClient._read_environment_variables() - self._client_cert_source = CloudRedisClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = CloudRedisClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ( + CloudRedisClient._read_environment_variables() + ) + self._client_cert_source = CloudRedisClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = CloudRedisClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) self._api_endpoint: str = "" # updated below, depending on `transport` # Initialize the universe domain validation. @@ -598,7 +686,9 @@ def __init__(self, *, api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport @@ -607,25 +697,28 @@ def __init__(self, *, if transport_provided: # transport is a CloudRedisTransport instance. 
if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) if self._client_options.scopes: raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." + "When providing a transport instance, provide its scopes directly." ) self._transport = cast(CloudRedisTransport, transport) self._api_endpoint = self._transport.host - self._api_endpoint = (self._api_endpoint or - CloudRedisClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) + self._api_endpoint = self._api_endpoint or CloudRedisClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) if not transport_provided: - transport_init: Union[Type[CloudRedisTransport], Callable[..., CloudRedisTransport]] = ( + transport_init: Union[ + Type[CloudRedisTransport], Callable[..., CloudRedisTransport] + ] = ( CloudRedisClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudRedisTransport], transport) @@ -638,9 +731,12 @@ def __init__(self, *, "google.api_core.client_options.ClientOptions.quota_project_id": self._client_options.quota_project_id, "google.api_core.client_options.ClientOptions.client_cert_source": self._client_options.client_cert_source, "google.api_core.client_options.ClientOptions.api_audience": self._client_options.api_audience, - } - provided_unsupported_params = [name for name, value in unsupported_params.items() if value is not None] + provided_unsupported_params = [ + name + for name, value in unsupported_params.items() + if value is not None + ] if provided_unsupported_params: raise 
core_exceptions.AsyncRestUnsupportedParameterError( # type: ignore f"The following provided parameters are not supported for `transport=rest_asyncio`: {', '.join(provided_unsupported_params)}" @@ -654,8 +750,12 @@ def __init__(self, *, import google.auth._default # type: ignore - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) # initialize with the provided callable or the passed in class self._transport = transport_init( @@ -671,28 +771,37 @@ def __init__(self, *, ) if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.redis_v1.CloudRedisClient`.", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { "serviceName": "google.cloud.redis.v1.CloudRedis", "credentialsType": None, - } + }, ) - def list_instances(self, - request: Optional[Union[cloud_redis.ListInstancesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = 
gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListInstancesPager: + def list_instances( + self, + request: Optional[Union[cloud_redis.ListInstancesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListInstancesPager: r"""Lists all Redis instances owned by a project in either the specified location (region) or all locations. @@ -765,10 +874,14 @@ def sample_list_instances(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -786,9 +899,7 @@ def sample_list_instances(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -816,14 +927,15 @@ def sample_list_instances(): # Done; return the response. 
return response - def get_instance(self, - request: Optional[Union[cloud_redis.GetInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> cloud_redis.Instance: + def get_instance( + self, + request: Optional[Union[cloud_redis.GetInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.Instance: r"""Gets the details of a specific Redis instance. .. code-block:: python @@ -880,10 +992,14 @@ def sample_get_instance(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -901,9 +1017,7 @@ def sample_get_instance(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -920,16 +1034,17 @@ def sample_get_instance(): # Done; return the response. 
return response - def create_instance(self, - request: Optional[Union[cloud_redis.CreateInstanceRequest, dict]] = None, - *, - parent: Optional[str] = None, - instance_id: Optional[str] = None, - instance: Optional[cloud_redis.Instance] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def create_instance( + self, + request: Optional[Union[cloud_redis.CreateInstanceRequest, dict]] = None, + *, + parent: Optional[str] = None, + instance_id: Optional[str] = None, + instance: Optional[cloud_redis.Instance] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Creates a Redis instance based on the specified tier and memory size. @@ -1035,10 +1150,14 @@ def sample_create_instance(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [parent, instance_id, instance] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1060,9 +1179,7 @@ def sample_create_instance(): # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Validate the universe domain. @@ -1087,15 +1204,16 @@ def sample_create_instance(): # Done; return the response. return response - def update_instance(self, - request: Optional[Union[cloud_redis.UpdateInstanceRequest, dict]] = None, - *, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - instance: Optional[cloud_redis.Instance] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def update_instance( + self, + request: Optional[Union[cloud_redis.UpdateInstanceRequest, dict]] = None, + *, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + instance: Optional[cloud_redis.Instance] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Updates the metadata and configuration of a specific Redis instance. Completed longrunning.Operation will contain the new @@ -1185,10 +1303,14 @@ def sample_update_instance(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. flattened_params = [update_mask, instance] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1208,9 +1330,9 @@ def sample_update_instance(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("instance.name", request.instance.name), - )), + gapic_v1.routing_header.to_grpc_metadata( + (("instance.name", request.instance.name),) + ), ) # Validate the universe domain. @@ -1235,14 +1357,15 @@ def sample_update_instance(): # Done; return the response. return response - def delete_instance(self, - request: Optional[Union[cloud_redis.DeleteInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def delete_instance( + self, + request: Optional[Union[cloud_redis.DeleteInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Deletes a specific Redis instance. Instance stops serving and data is deleted. @@ -1316,10 +1439,14 @@ def sample_delete_instance(): # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1337,9 +1464,7 @@ def sample_delete_instance(): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -1419,8 +1544,7 @@ def list_operations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -1429,7 +1553,11 @@ def list_operations( try: # Send the request. response = rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1479,8 +1607,7 @@ def get_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. 
@@ -1489,7 +1616,11 @@ def get_operation( try: # Send the request. response = rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1543,15 +1674,19 @@ def delete_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc(request_pb, retry=retry, timeout=timeout, metadata=metadata,) + rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) def cancel_operation( self, @@ -1598,15 +1733,19 @@ def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc(request_pb, retry=retry, timeout=timeout, metadata=metadata,) + rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) def wait_operation( self, @@ -1656,8 +1795,7 @@ def wait_operation( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -1666,7 +1804,11 @@ def wait_operation( try: # Send the request. response = rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -1716,8 +1858,7 @@ def get_location( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -1726,7 +1867,11 @@ def get_location( try: # Send the request. response = rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1776,8 +1921,7 @@ def list_locations( # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request_pb.name),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), ) # Validate the universe domain. @@ -1786,7 +1930,11 @@ def list_locations( try: # Send the request. response = rpc( - request_pb, retry=retry, timeout=timeout, metadata=metadata,) + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -1795,11 +1943,11 @@ def list_locations( raise e -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "CloudRedisClient", -) +__all__ = ("CloudRedisClient",) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/pagers.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/pagers.py index 4e0e4cb32343..10b0f261891a 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/pagers.py @@ -13,13 +13,27 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + from google.api_core import gapic_v1 from google.api_core import retry as retries from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore @@ -44,14 +58,17 @@ class ListInstancesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., cloud_redis.ListInstancesResponse], - request: cloud_redis.ListInstancesRequest, - response: cloud_redis.ListInstancesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., cloud_redis.ListInstancesResponse], + request: cloud_redis.ListInstancesRequest, + response: cloud_redis.ListInstancesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiate the pager. 
Args: @@ -84,7 +101,12 @@ def pages(self) -> Iterator[cloud_redis.ListInstancesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[cloud_redis.Instance]: @@ -92,7 +114,7 @@ def __iter__(self) -> Iterator[cloud_redis.Instance]: yield from page.instances def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListInstancesAsyncPager: @@ -112,14 +134,17 @@ class ListInstancesAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[cloud_redis.ListInstancesResponse]], - request: cloud_redis.ListInstancesRequest, - response: cloud_redis.ListInstancesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[cloud_redis.ListInstancesResponse]], + request: cloud_redis.ListInstancesRequest, + response: cloud_redis.ListInstancesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): """Instantiates the pager. 
Args: @@ -152,8 +177,14 @@ async def pages(self) -> AsyncIterator[cloud_redis.ListInstancesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response + def __aiter__(self) -> AsyncIterator[cloud_redis.Instance]: async def async_generator(): async for page in self.pages: @@ -163,4 +194,4 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py index 1cbbf54c2525..c529d7ba2173 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py @@ -14,18 +14,24 @@ # limitations under the License. # from collections import OrderedDict -from typing import Dict, Type, Tuple +from typing import Dict, Tuple, Type from .base import CloudRedisTransport from .grpc import CloudRedisGrpcTransport from .grpc_asyncio import CloudRedisGrpcAsyncIOTransport -from .rest import CloudRedisRestTransport -from .rest import CloudRedisRestInterceptor +from .rest import CloudRedisRestInterceptor, CloudRedisRestTransport + ASYNC_REST_CLASSES: Tuple[str, ...] 
try: - from .rest_asyncio import AsyncCloudRedisRestTransport - from .rest_asyncio import AsyncCloudRedisRestInterceptor - ASYNC_REST_CLASSES = ('AsyncCloudRedisRestTransport', 'AsyncCloudRedisRestInterceptor') + from .rest_asyncio import ( + AsyncCloudRedisRestInterceptor, + AsyncCloudRedisRestTransport, + ) + + ASYNC_REST_CLASSES = ( + "AsyncCloudRedisRestTransport", + "AsyncCloudRedisRestInterceptor", + ) HAS_REST_ASYNC = True except ImportError: # pragma: NO COVER ASYNC_REST_CLASSES = () @@ -34,16 +40,16 @@ # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[CloudRedisTransport]] -_transport_registry['grpc'] = CloudRedisGrpcTransport -_transport_registry['grpc_asyncio'] = CloudRedisGrpcAsyncIOTransport -_transport_registry['rest'] = CloudRedisRestTransport +_transport_registry["grpc"] = CloudRedisGrpcTransport +_transport_registry["grpc_asyncio"] = CloudRedisGrpcAsyncIOTransport +_transport_registry["rest"] = CloudRedisRestTransport if HAS_REST_ASYNC: # pragma: NO COVER - _transport_registry['rest_asyncio'] = AsyncCloudRedisRestTransport + _transport_registry["rest_asyncio"] = AsyncCloudRedisRestTransport __all__ = ( - 'CloudRedisTransport', - 'CloudRedisGrpcTransport', - 'CloudRedisGrpcAsyncIOTransport', - 'CloudRedisRestTransport', - 'CloudRedisRestInterceptor', + "CloudRedisTransport", + "CloudRedisGrpcTransport", + "CloudRedisGrpcAsyncIOTransport", + "CloudRedisRestTransport", + "CloudRedisRestInterceptor", ) + ASYNC_REST_CLASSES diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py index 37db15e36102..9a11e202d6b1 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ 
b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -16,23 +16,22 @@ import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -from google.cloud.redis_v1 import gapic_version as package_version - -import google.auth # type: ignore import google.api_core +import google.auth # type: ignore +import google.protobuf from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, operations_v1 from google.api_core import retry as retries -from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.redis_v1 import gapic_version as package_version from google.cloud.redis_v1.types import cloud_redis -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ @@ -41,24 +40,23 @@ class CloudRedisTransport(abc.ABC): """Abstract transport class for CloudRedis.""" - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - DEFAULT_HOST: str = 'redis.googleapis.com' + DEFAULT_HOST: str = "redis.googleapis.com" def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: 
Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: """Instantiate the transport. Args: @@ -97,31 +95,43 @@ def __init__( # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) if credentials_file is not None: credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - default_scopes=self.AUTH_SCOPES, - ) + credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(scopes=scopes, quota_project_id=quota_project_id, default_scopes=self.AUTH_SCOPES) + credentials, _ = google.auth.default( + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) # Don't apply audience if the credentials file passed from user. 
if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): credentials = credentials.with_always_use_jwt_access(True) # Save the credentials. self._credentials = credentials # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' + if ":" not in host: + host += ":443" self._host = host self._wrapped_methods: Dict[Callable, Callable] = {} @@ -193,14 +203,14 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - } + } def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
""" raise NotImplementedError() @@ -210,48 +220,51 @@ def operations_client(self): raise NotImplementedError() @property - def list_instances(self) -> Callable[ - [cloud_redis.ListInstancesRequest], - Union[ - cloud_redis.ListInstancesResponse, - Awaitable[cloud_redis.ListInstancesResponse] - ]]: + def list_instances( + self, + ) -> Callable[ + [cloud_redis.ListInstancesRequest], + Union[ + cloud_redis.ListInstancesResponse, + Awaitable[cloud_redis.ListInstancesResponse], + ], + ]: raise NotImplementedError() @property - def get_instance(self) -> Callable[ - [cloud_redis.GetInstanceRequest], - Union[ - cloud_redis.Instance, - Awaitable[cloud_redis.Instance] - ]]: + def get_instance( + self, + ) -> Callable[ + [cloud_redis.GetInstanceRequest], + Union[cloud_redis.Instance, Awaitable[cloud_redis.Instance]], + ]: raise NotImplementedError() @property - def create_instance(self) -> Callable[ - [cloud_redis.CreateInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def create_instance( + self, + ) -> Callable[ + [cloud_redis.CreateInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def update_instance(self) -> Callable[ - [cloud_redis.UpdateInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def update_instance( + self, + ) -> Callable[ + [cloud_redis.UpdateInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property - def delete_instance(self) -> Callable[ - [cloud_redis.DeleteInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def delete_instance( + self, + ) -> Callable[ + [cloud_redis.DeleteInstanceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: raise NotImplementedError() @property @@ -259,7 +272,10 @@ def list_operations( self, ) 
-> Callable[ [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], ]: raise NotImplementedError() @@ -300,7 +316,8 @@ def wait_operation( raise NotImplementedError() @property - def get_location(self, + def get_location( + self, ) -> Callable[ [locations_pb2.GetLocationRequest], Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], @@ -308,10 +325,14 @@ def get_location(self, raise NotImplementedError() @property - def list_locations(self, + def list_locations( + self, ) -> Callable[ [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], ]: raise NotImplementedError() @@ -320,6 +341,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ( - 'CloudRedisTransport', -) +__all__ = ("CloudRedisTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index ee8b7164a801..eca8947b3675 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -19,25 +19,23 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as 
ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson +import google.auth # type: ignore import google.protobuf.message - import grpc # type: ignore import proto # type: ignore - -from google.cloud.location import locations_pb2 # type: ignore +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.types import cloud_redis -from google.longrunning import operations_pb2 # type: ignore -from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson + +from .base import DEFAULT_CLIENT_INFO, CloudRedisTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -47,7 +45,9 @@ class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) if logging_enabled: # pragma: NO COVER request_metadata = client_call_details.metadata if isinstance(request, proto.Message): @@ -68,7 +68,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -79,7 +79,11 @@ def 
intercept_unary_unary(self, continuation, client_call_details, request): if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) result = response.result() if isinstance(result, proto.Message): response_payload = type(result).to_json(result) @@ -94,7 +98,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Received response for {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": client_call_details.method, "response": grpc_response, @@ -136,23 +140,26 @@ class CloudRedisGrpcTransport(CloudRedisTransport): It sends protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. 
""" + _stubs: Dict[str, Callable] - def __init__(self, *, - host: str = 'redis.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "redis.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: @@ -280,19 +287,23 @@ def __init__(self, *, ) self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) # Wrap messages. 
This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod - def create_channel(cls, - host: str = 'redis.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: + def create_channel( + cls, + host: str = "redis.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: """Create and return a gRPC channel object. Args: host (Optional[str]): The host for the channel to use. @@ -328,13 +339,12 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property @@ -354,9 +364,11 @@ def operations_client(self) -> operations_v1.OperationsClient: return self._operations_client @property - def list_instances(self) -> Callable[ - [cloud_redis.ListInstancesRequest], - cloud_redis.ListInstancesResponse]: + def list_instances( + self, + ) -> Callable[ + [cloud_redis.ListInstancesRequest], cloud_redis.ListInstancesResponse + ]: r"""Return a callable for the list instances method over gRPC. Lists all Redis instances owned by a project in either the @@ -380,18 +392,18 @@ def list_instances(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_instances' not in self._stubs: - self._stubs['list_instances'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/ListInstances', + if "list_instances" not in self._stubs: + self._stubs["list_instances"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/ListInstances", request_serializer=cloud_redis.ListInstancesRequest.serialize, response_deserializer=cloud_redis.ListInstancesResponse.deserialize, ) - return self._stubs['list_instances'] + return self._stubs["list_instances"] @property - def get_instance(self) -> Callable[ - [cloud_redis.GetInstanceRequest], - cloud_redis.Instance]: + def get_instance( + self, + ) -> Callable[[cloud_redis.GetInstanceRequest], cloud_redis.Instance]: r"""Return a callable for the get instance method over gRPC. Gets the details of a specific Redis instance. @@ -406,18 +418,18 @@ def get_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_instance' not in self._stubs: - self._stubs['get_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/GetInstance', + if "get_instance" not in self._stubs: + self._stubs["get_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/GetInstance", request_serializer=cloud_redis.GetInstanceRequest.serialize, response_deserializer=cloud_redis.Instance.deserialize, ) - return self._stubs['get_instance'] + return self._stubs["get_instance"] @property - def create_instance(self) -> Callable[ - [cloud_redis.CreateInstanceRequest], - operations_pb2.Operation]: + def create_instance( + self, + ) -> Callable[[cloud_redis.CreateInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the create instance method over gRPC. Creates a Redis instance based on the specified tier and memory @@ -445,18 +457,18 @@ def create_instance(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_instance' not in self._stubs: - self._stubs['create_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/CreateInstance', + if "create_instance" not in self._stubs: + self._stubs["create_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/CreateInstance", request_serializer=cloud_redis.CreateInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_instance'] + return self._stubs["create_instance"] @property - def update_instance(self) -> Callable[ - [cloud_redis.UpdateInstanceRequest], - operations_pb2.Operation]: + def update_instance( + self, + ) -> Callable[[cloud_redis.UpdateInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the update instance method over gRPC. Updates the metadata and configuration of a specific @@ -476,18 +488,18 @@ def update_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_instance' not in self._stubs: - self._stubs['update_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/UpdateInstance', + if "update_instance" not in self._stubs: + self._stubs["update_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/UpdateInstance", request_serializer=cloud_redis.UpdateInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['update_instance'] + return self._stubs["update_instance"] @property - def delete_instance(self) -> Callable[ - [cloud_redis.DeleteInstanceRequest], - operations_pb2.Operation]: + def delete_instance( + self, + ) -> Callable[[cloud_redis.DeleteInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the delete instance method over gRPC. 
Deletes a specific Redis instance. Instance stops @@ -503,13 +515,13 @@ def delete_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_instance' not in self._stubs: - self._stubs['delete_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/DeleteInstance', + if "delete_instance" not in self._stubs: + self._stubs["delete_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/DeleteInstance", request_serializer=cloud_redis.DeleteInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['delete_instance'] + return self._stubs["delete_instance"] def close(self): self._logged_channel.close() @@ -518,8 +530,7 @@ def close(self): def delete_operation( self, ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ + r"""Return a callable for the delete_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -536,8 +547,7 @@ def delete_operation( def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -554,8 +564,7 @@ def cancel_operation( def wait_operation( self, ) -> Callable[[operations_pb2.WaitOperationRequest], None]: - r"""Return a callable for the wait_operation method over gRPC. - """ + r"""Return a callable for the wait_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
# gRPC handles serialization and deserialization, so we just need @@ -572,8 +581,7 @@ def wait_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -589,9 +597,10 @@ def get_operation( @property def list_operations( self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -607,9 +616,10 @@ def list_operations( @property def list_locations( self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -626,8 +636,7 @@ def list_locations( def get_location( self, ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ + r"""Return a callable for the list locations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
# gRPC handles serialization and deserialization, so we just need @@ -645,6 +654,4 @@ def kind(self) -> str: return "grpc" -__all__ = ( - 'CloudRedisGrpcTransport', -) +__all__ = ("CloudRedisGrpcTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index 7999af4c3fd3..0f56898dbe3e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -15,33 +15,31 @@ # import inspect import json -import pickle import logging as std_logging +import pickle import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.api_core import retry_async as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.redis_v1.types import cloud_redis +from google.longrunning import operations_pb2 # type: ignore from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore from grpc.experimental import aio # 
type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.cloud.redis_v1.types import cloud_redis -from google.longrunning import operations_pb2 # type: ignore -from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO +from .base import DEFAULT_CLIENT_INFO, CloudRedisTransport from .grpc import CloudRedisGrpcTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -49,9 +47,13 @@ _LOGGER = std_logging.getLogger(__name__) -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) if logging_enabled: # pragma: NO COVER request_metadata = client_call_details.metadata if isinstance(request, proto.Message): @@ -72,7 +74,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -83,7 +85,11 @@ async def intercept_unary_unary(self, continuation, client_call_details, request if logging_enabled: # pragma: NO COVER response_metadata = await response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) result = await response if isinstance(result, 
proto.Message): response_payload = type(result).to_json(result) @@ -98,7 +104,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Received response to rpc {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": str(client_call_details.method), "response": grpc_response, @@ -145,13 +151,15 @@ class CloudRedisGrpcAsyncIOTransport(CloudRedisTransport): _stubs: Dict[str, Callable] = {} @classmethod - def create_channel(cls, - host: str = 'redis.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: + def create_channel( + cls, + host: str = "redis.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: host (Optional[str]): The host for the channel to use. 
@@ -182,24 +190,26 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) - def __init__(self, *, - host: str = 'redis.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "redis.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. 
Args: @@ -329,7 +339,9 @@ def __init__(self, *, self._interceptor = _LoggingClientAIOInterceptor() self._grpc_channel._unary_unary_interceptors.append(self._interceptor) self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @@ -360,9 +372,11 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: return self._operations_client @property - def list_instances(self) -> Callable[ - [cloud_redis.ListInstancesRequest], - Awaitable[cloud_redis.ListInstancesResponse]]: + def list_instances( + self, + ) -> Callable[ + [cloud_redis.ListInstancesRequest], Awaitable[cloud_redis.ListInstancesResponse] + ]: r"""Return a callable for the list instances method over gRPC. Lists all Redis instances owned by a project in either the @@ -386,18 +400,18 @@ def list_instances(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_instances' not in self._stubs: - self._stubs['list_instances'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/ListInstances', + if "list_instances" not in self._stubs: + self._stubs["list_instances"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/ListInstances", request_serializer=cloud_redis.ListInstancesRequest.serialize, response_deserializer=cloud_redis.ListInstancesResponse.deserialize, ) - return self._stubs['list_instances'] + return self._stubs["list_instances"] @property - def get_instance(self) -> Callable[ - [cloud_redis.GetInstanceRequest], - Awaitable[cloud_redis.Instance]]: + def get_instance( + self, + ) -> Callable[[cloud_redis.GetInstanceRequest], Awaitable[cloud_redis.Instance]]: r"""Return a callable for the get instance method over gRPC. Gets the details of a specific Redis instance. @@ -412,18 +426,20 @@ def get_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_instance' not in self._stubs: - self._stubs['get_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/GetInstance', + if "get_instance" not in self._stubs: + self._stubs["get_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/GetInstance", request_serializer=cloud_redis.GetInstanceRequest.serialize, response_deserializer=cloud_redis.Instance.deserialize, ) - return self._stubs['get_instance'] + return self._stubs["get_instance"] @property - def create_instance(self) -> Callable[ - [cloud_redis.CreateInstanceRequest], - Awaitable[operations_pb2.Operation]]: + def create_instance( + self, + ) -> Callable[ + [cloud_redis.CreateInstanceRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the create instance method over gRPC. 
Creates a Redis instance based on the specified tier and memory @@ -451,18 +467,20 @@ def create_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_instance' not in self._stubs: - self._stubs['create_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/CreateInstance', + if "create_instance" not in self._stubs: + self._stubs["create_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/CreateInstance", request_serializer=cloud_redis.CreateInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_instance'] + return self._stubs["create_instance"] @property - def update_instance(self) -> Callable[ - [cloud_redis.UpdateInstanceRequest], - Awaitable[operations_pb2.Operation]]: + def update_instance( + self, + ) -> Callable[ + [cloud_redis.UpdateInstanceRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the update instance method over gRPC. Updates the metadata and configuration of a specific @@ -482,18 +500,20 @@ def update_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'update_instance' not in self._stubs: - self._stubs['update_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/UpdateInstance', + if "update_instance" not in self._stubs: + self._stubs["update_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/UpdateInstance", request_serializer=cloud_redis.UpdateInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['update_instance'] + return self._stubs["update_instance"] @property - def delete_instance(self) -> Callable[ - [cloud_redis.DeleteInstanceRequest], - Awaitable[operations_pb2.Operation]]: + def delete_instance( + self, + ) -> Callable[ + [cloud_redis.DeleteInstanceRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the delete instance method over gRPC. Deletes a specific Redis instance. Instance stops @@ -509,16 +529,16 @@ def delete_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'delete_instance' not in self._stubs: - self._stubs['delete_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/DeleteInstance', + if "delete_instance" not in self._stubs: + self._stubs["delete_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/DeleteInstance", request_serializer=cloud_redis.DeleteInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['delete_instance'] + return self._stubs["delete_instance"] def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { self.list_instances: self._wrap_method( self.list_instances, @@ -598,8 +618,7 @@ def kind(self) -> str: def delete_operation( self, ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ + r"""Return a callable for the delete_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -616,8 +635,7 @@ def delete_operation( def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -634,8 +652,7 @@ def cancel_operation( def wait_operation( self, ) -> Callable[[operations_pb2.WaitOperationRequest], None]: - r"""Return a callable for the wait_operation method over gRPC. 
- """ + r"""Return a callable for the wait_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -652,8 +669,7 @@ def wait_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -669,9 +685,10 @@ def get_operation( @property def list_operations( self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -687,9 +704,10 @@ def list_operations( @property def list_locations( self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -706,8 +724,7 @@ def list_locations( def get_location( self, ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. 
- """ + r"""Return a callable for the list locations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -721,6 +738,4 @@ def get_location( return self._stubs["get_location"] -__all__ = ( - 'CloudRedisGrpcAsyncIOTransport', -) +__all__ = ("CloudRedisGrpcAsyncIOTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index dd225e2b49a1..cedf452649b4 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -13,34 +13,26 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import logging +import dataclasses import json # type: ignore +import logging +import warnings +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore +import google.protobuf from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1, rest_helpers, rest_streaming from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 -import google.protobuf - -from google.protobuf import json_format -from google.api_core import operations_v1 -from google.cloud.location import locations_pb2 # type: ignore - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.types import cloud_redis from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format +from requests import __version__ as requests_version - -from .rest_base import _BaseCloudRedisRestTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseCloudRedisRestTransport try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -49,6 +41,7 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -125,7 +118,14 @@ def post_update_instance(self, response): """ - def pre_create_instance(self, request: 
cloud_redis.CreateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.CreateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + + def pre_create_instance( + self, + request: cloud_redis.CreateInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.CreateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for create_instance Override in a subclass to manipulate the request or metadata @@ -133,7 +133,9 @@ def pre_create_instance(self, request: cloud_redis.CreateInstanceRequest, metada """ return request, metadata - def post_create_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_create_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for create_instance DEPRECATED. Please use the `post_create_instance_with_metadata` @@ -146,7 +148,11 @@ def post_create_instance(self, response: operations_pb2.Operation) -> operations """ return response - def post_create_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_create_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for create_instance Override in a subclass to read or manipulate the response or metadata after it @@ -161,7 +167,13 @@ def post_create_instance_with_metadata(self, response: operations_pb2.Operation, """ return response, metadata - def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.DeleteInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_delete_instance( + 
self, + request: cloud_redis.DeleteInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.DeleteInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_instance Override in a subclass to manipulate the request or metadata @@ -169,7 +181,9 @@ def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, metada """ return request, metadata - def post_delete_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_delete_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_instance DEPRECATED. Please use the `post_delete_instance_with_metadata` @@ -182,7 +196,11 @@ def post_delete_instance(self, response: operations_pb2.Operation) -> operations """ return response - def post_delete_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_delete_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for delete_instance Override in a subclass to read or manipulate the response or metadata after it @@ -197,7 +215,11 @@ def post_delete_instance_with_metadata(self, response: operations_pb2.Operation, """ return response, metadata - def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.GetInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_get_instance( + self, + request: cloud_redis.GetInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.GetInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor 
for get_instance Override in a subclass to manipulate the request or metadata @@ -218,7 +240,11 @@ def post_get_instance(self, response: cloud_redis.Instance) -> cloud_redis.Insta """ return response - def post_get_instance_with_metadata(self, response: cloud_redis.Instance, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_get_instance_with_metadata( + self, + response: cloud_redis.Instance, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for get_instance Override in a subclass to read or manipulate the response or metadata after it @@ -233,7 +259,13 @@ def post_get_instance_with_metadata(self, response: cloud_redis.Instance, metada """ return response, metadata - def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_list_instances( + self, + request: cloud_redis.ListInstancesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_instances Override in a subclass to manipulate the request or metadata @@ -241,7 +273,9 @@ def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, metadata """ return request, metadata - def post_list_instances(self, response: cloud_redis.ListInstancesResponse) -> cloud_redis.ListInstancesResponse: + def post_list_instances( + self, response: cloud_redis.ListInstancesResponse + ) -> cloud_redis.ListInstancesResponse: """Post-rpc interceptor for list_instances DEPRECATED. 
Please use the `post_list_instances_with_metadata` @@ -254,7 +288,13 @@ def post_list_instances(self, response: cloud_redis.ListInstancesResponse) -> cl """ return response - def post_list_instances_with_metadata(self, response: cloud_redis.ListInstancesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_list_instances_with_metadata( + self, + response: cloud_redis.ListInstancesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Post-rpc interceptor for list_instances Override in a subclass to read or manipulate the response or metadata after it @@ -269,7 +309,13 @@ def post_list_instances_with_metadata(self, response: cloud_redis.ListInstancesR """ return response, metadata - def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_update_instance( + self, + request: cloud_redis.UpdateInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for update_instance Override in a subclass to manipulate the request or metadata @@ -277,7 +323,9 @@ def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, metada """ return request, metadata - def post_update_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_update_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for update_instance DEPRECATED. 
Please use the `post_update_instance_with_metadata` @@ -290,7 +338,11 @@ def post_update_instance(self, response: operations_pb2.Operation) -> operations """ return response - def post_update_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_update_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for update_instance Override in a subclass to read or manipulate the response or metadata after it @@ -306,8 +358,12 @@ def post_update_instance_with_metadata(self, response: operations_pb2.Operation, return response, metadata def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_location Override in a subclass to manipulate the request or metadata @@ -327,8 +383,12 @@ def post_get_location( return response def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_locations Override in a subclass to manipulate the request or metadata @@ -348,8 +408,12 @@ def 
post_list_locations( return response def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for cancel_operation Override in a subclass to manipulate the request or metadata @@ -357,9 +421,7 @@ def pre_cancel_operation( """ return request, metadata - def post_cancel_operation( - self, response: None - ) -> None: + def post_cancel_operation(self, response: None) -> None: """Post-rpc interceptor for cancel_operation Override in a subclass to manipulate the response @@ -369,8 +431,12 @@ def post_cancel_operation( return response def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_operation Override in a subclass to manipulate the request or metadata @@ -378,9 +444,7 @@ def pre_delete_operation( """ return request, metadata - def post_delete_operation( - self, response: None - ) -> None: + def post_delete_operation(self, response: None) -> None: """Post-rpc interceptor for delete_operation Override in a subclass to manipulate the response @@ -390,8 +454,12 @@ def post_delete_operation( return response def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> 
Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata @@ -411,8 +479,12 @@ def post_get_operation( return response def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_operations Override in a subclass to manipulate the request or metadata @@ -432,8 +504,12 @@ def post_list_operations( return response def pre_wait_operation( - self, request: operations_pb2.WaitOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.WaitOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: operations_pb2.WaitOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.WaitOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for wait_operation Override in a subclass to manipulate the request or metadata @@ -492,62 +568,63 @@ class CloudRedisRestTransport(_BaseCloudRedisRestTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, - host: str = 'redis.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], 
Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[CloudRedisRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "redis.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[CloudRedisRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via a GitHub issue in - this library's repository. Thank you! - - Args: - host (Optional[str]): - The hostname to connect to (default: 'redis.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. This argument will be - removed in the next major version of this library. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. 
- client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - interceptor (Optional[CloudRedisRestInterceptor]): Interceptor used - to manipulate requests, request metadata, and responses. - api_audience (Optional[str]): The intended audience for the API calls - to the service that will be set when using certain 3rd party - authentication flows. Audience is typically a resource identifier. - If not set, the host value will be used as a default. + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to (default: 'redis.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. 
This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + interceptor (Optional[CloudRedisRestInterceptor]): Interceptor used + to manipulate requests, request metadata, and responses. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the host value will be used as a default. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
@@ -559,10 +636,11 @@ def __init__(self, *, client_info=client_info, always_use_jwt_access=always_use_jwt_access, url_scheme=url_scheme, - api_audience=api_audience + api_audience=api_audience, ) self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) + self._credentials, default_host=self.DEFAULT_HOST + ) self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) @@ -579,53 +657,58 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ + "google.longrunning.Operations.CancelOperation": [ { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", }, ], - 'google.longrunning.Operations.DeleteOperation': [ + "google.longrunning.Operations.DeleteOperation": [ { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", }, ], - 'google.longrunning.Operations.GetOperation': [ + "google.longrunning.Operations.GetOperation": [ { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", }, ], - 'google.longrunning.Operations.ListOperations': [ + "google.longrunning.Operations.ListOperations": [ { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", }, ], - 'google.longrunning.Operations.WaitOperation': [ + "google.longrunning.Operations.WaitOperation": [ { - 'method': 'post', - 'uri': 
'/v2/{name=projects/*/locations/*/operations/*}:wait', - 'body': '*', + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/operations/*}:wait", + "body": "*", }, ], } rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) # Return the client from cache. return self._operations_client - class _CreateInstance(_BaseCloudRedisRestTransport._BaseCreateInstance, CloudRedisRestStub): + class _CreateInstance( + _BaseCloudRedisRestTransport._BaseCreateInstance, CloudRedisRestStub + ): def __hash__(self): return hash("CloudRedisRestTransport.CreateInstance") @@ -637,27 +720,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: cloud_redis.CreateInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: 
cloud_redis.CreateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the create instance method over HTTP. Args: @@ -680,32 +765,48 @@ def __call__(self, """ - http_options = _BaseCloudRedisRestTransport._BaseCreateInstance._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseCreateInstance._get_http_options() + ) request, metadata = self._interceptor.pre_create_instance(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseCreateInstance._get_transcoded_request(http_options, request) + transcoded_request = _BaseCloudRedisRestTransport._BaseCreateInstance._get_transcoded_request( + http_options, request + ) - body = _BaseCloudRedisRestTransport._BaseCreateInstance._get_request_body_json(transcoded_request) + body = ( + _BaseCloudRedisRestTransport._BaseCreateInstance._get_request_body_json( + transcoded_request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseCreateInstance._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseCreateInstance._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": 
method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.CreateInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "CreateInstance", "httpRequest": http_request, @@ -714,7 +815,15 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._CreateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = CloudRedisRestTransport._CreateInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -727,20 +836,24 @@ def __call__(self, resp = self._interceptor.post_create_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_create_instance_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.create_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "CreateInstance", "metadata": http_response["headers"], @@ -749,7 +862,9 @@ def __call__(self, ) return resp - class _DeleteInstance(_BaseCloudRedisRestTransport._BaseDeleteInstance, 
CloudRedisRestStub): + class _DeleteInstance( + _BaseCloudRedisRestTransport._BaseDeleteInstance, CloudRedisRestStub + ): def __hash__(self): return hash("CloudRedisRestTransport.DeleteInstance") @@ -761,26 +876,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: cloud_redis.DeleteInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: cloud_redis.DeleteInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the delete instance method over HTTP. 
Args: @@ -803,30 +920,42 @@ def __call__(self, """ - http_options = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseDeleteInstance._get_http_options() + ) request, metadata = self._interceptor.pre_delete_instance(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_transcoded_request(http_options, request) + transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseDeleteInstance._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "DeleteInstance", "httpRequest": http_request, @@ -835,7 +964,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._DeleteInstance._get_response(self._host, metadata, query_params, self._session, timeout, 
transcoded_request) + response = CloudRedisRestTransport._DeleteInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -848,20 +984,24 @@ def __call__(self, resp = self._interceptor.post_delete_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_delete_instance_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.delete_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "DeleteInstance", "metadata": http_response["headers"], @@ -870,7 +1010,9 @@ def __call__(self, ) return resp - class _GetInstance(_BaseCloudRedisRestTransport._BaseGetInstance, CloudRedisRestStub): + class _GetInstance( + _BaseCloudRedisRestTransport._BaseGetInstance, CloudRedisRestStub + ): def __hash__(self): return hash("CloudRedisRestTransport.GetInstance") @@ -882,26 +1024,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 
'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: cloud_redis.GetInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> cloud_redis.Instance: + def __call__( + self, + request: cloud_redis.GetInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.Instance: r"""Call the get instance method over HTTP. Args: @@ -921,30 +1065,44 @@ def __call__(self, A Memorystore for Redis instance. """ - http_options = _BaseCloudRedisRestTransport._BaseGetInstance._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseGetInstance._get_http_options() + ) request, metadata = self._interceptor.pre_get_instance(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseGetInstance._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseCloudRedisRestTransport._BaseGetInstance._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseGetInstance._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, 
uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetInstance", "httpRequest": http_request, @@ -953,7 +1111,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._GetInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._GetInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -968,20 +1133,24 @@ def __call__(self, resp = self._interceptor.post_get_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_get_instance_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = cloud_redis.Instance.to_json(response) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.get_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetInstance", "metadata": http_response["headers"], @@ -990,7 +1159,9 @@ def __call__(self, ) return resp - class _ListInstances(_BaseCloudRedisRestTransport._BaseListInstances, CloudRedisRestStub): + class _ListInstances( + _BaseCloudRedisRestTransport._BaseListInstances, CloudRedisRestStub + ): def __hash__(self): return hash("CloudRedisRestTransport.ListInstances") @@ -1002,26 +1173,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def 
__call__(self, - request: cloud_redis.ListInstancesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> cloud_redis.ListInstancesResponse: + def __call__( + self, + request: cloud_redis.ListInstancesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.ListInstancesResponse: r"""Call the list instances method over HTTP. Args: @@ -1043,30 +1216,44 @@ def __call__(self, """ - http_options = _BaseCloudRedisRestTransport._BaseListInstances._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseListInstances._get_http_options() + ) request, metadata = self._interceptor.pre_list_instances(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseListInstances._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseCloudRedisRestTransport._BaseListInstances._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseListInstances._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseListInstances._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - 
"headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListInstances", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListInstances", "httpRequest": http_request, @@ -1075,7 +1262,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._ListInstances._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._ListInstances._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1090,20 +1284,26 @@ def __call__(self, resp = self._interceptor.post_list_instances(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_instances_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_list_instances_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = cloud_redis.ListInstancesResponse.to_json(response) + response_payload = cloud_redis.ListInstancesResponse.to_json( + response + ) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.list_instances", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListInstances", "metadata": 
http_response["headers"], @@ -1112,7 +1312,9 @@ def __call__(self, ) return resp - class _UpdateInstance(_BaseCloudRedisRestTransport._BaseUpdateInstance, CloudRedisRestStub): + class _UpdateInstance( + _BaseCloudRedisRestTransport._BaseUpdateInstance, CloudRedisRestStub + ): def __hash__(self): return hash("CloudRedisRestTransport.UpdateInstance") @@ -1124,27 +1326,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: cloud_redis.UpdateInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: cloud_redis.UpdateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the update instance method over HTTP. 
Args: @@ -1167,32 +1371,48 @@ def __call__(self, """ - http_options = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseUpdateInstance._get_http_options() + ) request, metadata = self._interceptor.pre_update_instance(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_transcoded_request(http_options, request) + transcoded_request = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_transcoded_request( + http_options, request + ) - body = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_request_body_json(transcoded_request) + body = ( + _BaseCloudRedisRestTransport._BaseUpdateInstance._get_request_body_json( + transcoded_request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseUpdateInstance._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.UpdateInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "UpdateInstance", 
"httpRequest": http_request, @@ -1201,7 +1421,15 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._UpdateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = CloudRedisRestTransport._UpdateInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1214,20 +1442,24 @@ def __call__(self, resp = self._interceptor.post_update_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = self._interceptor.post_update_instance_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.update_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "UpdateInstance", "metadata": http_response["headers"], @@ -1237,50 +1469,54 @@ def __call__(self, return resp @property - def create_instance(self) -> Callable[ - [cloud_redis.CreateInstanceRequest], - operations_pb2.Operation]: + def create_instance( + self, + ) -> Callable[[cloud_redis.CreateInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore @property - def delete_instance(self) -> Callable[ - [cloud_redis.DeleteInstanceRequest], - operations_pb2.Operation]: + def delete_instance( + self, + ) -> Callable[[cloud_redis.DeleteInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore @property - def get_instance(self) -> Callable[ - [cloud_redis.GetInstanceRequest], - cloud_redis.Instance]: + def get_instance( + self, + ) -> Callable[[cloud_redis.GetInstanceRequest], cloud_redis.Instance]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore @property - def list_instances(self) -> Callable[ - [cloud_redis.ListInstancesRequest], - cloud_redis.ListInstancesResponse]: + def list_instances( + self, + ) -> Callable[ + [cloud_redis.ListInstancesRequest], cloud_redis.ListInstancesResponse + ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore @property - def update_instance(self) -> Callable[ - [cloud_redis.UpdateInstanceRequest], - operations_pb2.Operation]: + def update_instance( + self, + ) -> Callable[[cloud_redis.UpdateInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore @property def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - class _GetLocation(_BaseCloudRedisRestTransport._BaseGetLocation, CloudRedisRestStub): + class _GetLocation( + _BaseCloudRedisRestTransport._BaseGetLocation, CloudRedisRestStub + ): def __hash__(self): return hash("CloudRedisRestTransport.GetLocation") @@ -1292,27 +1528,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.Location: - + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: r"""Call the get location method over HTTP. Args: @@ -1330,30 +1567,44 @@ def __call__(self, locations_pb2.Location: Response from GetLocation method. """ - http_options = _BaseCloudRedisRestTransport._BaseGetLocation._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseGetLocation._get_http_options() + ) request, metadata = self._interceptor.pre_get_location(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseCloudRedisRestTransport._BaseGetLocation._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseGetLocation._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": 
dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetLocation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetLocation", "httpRequest": http_request, @@ -1362,7 +1613,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._GetLocation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1373,19 +1631,21 @@ def __call__(self, resp = locations_pb2.Location() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_location(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.GetLocation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetLocation", "httpResponse": http_response, @@ -1396,9 +1656,11 @@ def __call__(self, @property def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - class _ListLocations(_BaseCloudRedisRestTransport._BaseListLocations, CloudRedisRestStub): + class _ListLocations( + _BaseCloudRedisRestTransport._BaseListLocations, CloudRedisRestStub + ): def __hash__(self): return 
hash("CloudRedisRestTransport.ListLocations") @@ -1410,27 +1672,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.ListLocationsResponse: - + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: r"""Call the list locations method over HTTP. Args: @@ -1448,30 +1711,44 @@ def __call__(self, locations_pb2.ListLocationsResponse: Response from ListLocations method. 
""" - http_options = _BaseCloudRedisRestTransport._BaseListLocations._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseListLocations._get_http_options() + ) request, metadata = self._interceptor.pre_list_locations(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseListLocations._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseCloudRedisRestTransport._BaseListLocations._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseListLocations._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseListLocations._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListLocations", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListLocations", "httpRequest": http_request, @@ -1480,7 +1757,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = 
CloudRedisRestTransport._ListLocations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1491,19 +1775,21 @@ def __call__(self, resp = locations_pb2.ListLocationsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_locations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.ListLocations", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListLocations", "httpResponse": http_response, @@ -1514,9 +1800,11 @@ def __call__(self, @property def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - class _CancelOperation(_BaseCloudRedisRestTransport._BaseCancelOperation, CloudRedisRestStub): + class _CancelOperation( + _BaseCloudRedisRestTransport._BaseCancelOperation, CloudRedisRestStub + ): def __hash__(self): return hash("CloudRedisRestTransport.CancelOperation") @@ -1528,27 +1816,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = 
"application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Call the cancel operation method over HTTP. Args: @@ -1563,30 +1852,42 @@ def __call__(self, be of type `bytes`. """ - http_options = _BaseCloudRedisRestTransport._BaseCancelOperation._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseCancelOperation._get_http_options() + ) - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + transcoded_request = _BaseCloudRedisRestTransport._BaseCancelOperation._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseCloudRedisRestTransport._BaseCancelOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = 
"{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.CancelOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "CancelOperation", "httpRequest": http_request, @@ -1595,7 +1896,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1606,9 +1914,11 @@ def __call__(self, @property def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - class _DeleteOperation(_BaseCloudRedisRestTransport._BaseDeleteOperation, CloudRedisRestStub): + class _DeleteOperation( + _BaseCloudRedisRestTransport._BaseDeleteOperation, CloudRedisRestStub + ): def __hash__(self): return hash("CloudRedisRestTransport.DeleteOperation") @@ -1620,27 +1930,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Call the delete operation method over HTTP. Args: @@ -1655,30 +1966,42 @@ def __call__(self, be of type `bytes`. 
""" - http_options = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseDeleteOperation._get_http_options() + ) - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + query_params = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "DeleteOperation", "httpRequest": http_request, @@ -1687,7 +2010,14 @@ def __call__(self, ) # Send the request - response = 
CloudRedisRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1698,9 +2028,11 @@ def __call__(self, @property def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - class _GetOperation(_BaseCloudRedisRestTransport._BaseGetOperation, CloudRedisRestStub): + class _GetOperation( + _BaseCloudRedisRestTransport._BaseGetOperation, CloudRedisRestStub + ): def __hash__(self): return hash("CloudRedisRestTransport.GetOperation") @@ -1712,27 +2044,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: 
r"""Call the get operation method over HTTP. Args: @@ -1750,30 +2083,44 @@ def __call__(self, operations_pb2.Operation: Response from GetOperation method. """ - http_options = _BaseCloudRedisRestTransport._BaseGetOperation._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseGetOperation._get_http_options() + ) request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseCloudRedisRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetOperation", "httpRequest": http_request, @@ -1782,7 +2129,14 @@ def __call__(self, ) # Send the request - response = 
CloudRedisRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1793,19 +2147,21 @@ def __call__(self, resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.GetOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetOperation", "httpResponse": http_response, @@ -1816,9 +2172,11 @@ def __call__(self, @property def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - class _ListOperations(_BaseCloudRedisRestTransport._BaseListOperations, CloudRedisRestStub): + class _ListOperations( + _BaseCloudRedisRestTransport._BaseListOperations, CloudRedisRestStub + ): def __hash__(self): return hash("CloudRedisRestTransport.ListOperations") @@ -1830,27 +2188,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = 
transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: r"""Call the list operations method over HTTP. Args: @@ -1868,30 +2227,42 @@ def __call__(self, operations_pb2.ListOperationsResponse: Response from ListOperations method. 
""" - http_options = _BaseCloudRedisRestTransport._BaseListOperations._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseListOperations._get_http_options() + ) request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + transcoded_request = _BaseCloudRedisRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListOperations", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListOperations", "httpRequest": http_request, @@ -1900,7 +2271,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = 
CloudRedisRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1911,19 +2289,21 @@ def __call__(self, resp = operations_pb2.ListOperationsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.ListOperations", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListOperations", "httpResponse": http_response, @@ -1934,9 +2314,11 @@ def __call__(self, @property def wait_operation(self): - return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore + return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore - class _WaitOperation(_BaseCloudRedisRestTransport._BaseWaitOperation, CloudRedisRestStub): + class _WaitOperation( + _BaseCloudRedisRestTransport._BaseWaitOperation, CloudRedisRestStub + ): def __hash__(self): return hash("CloudRedisRestTransport.WaitOperation") @@ -1948,28 +2330,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" 
response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: operations_pb2.WaitOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - + def __call__( + self, + request: operations_pb2.WaitOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the wait operation method over HTTP. Args: @@ -1987,32 +2370,50 @@ def __call__(self, operations_pb2.Operation: Response from WaitOperation method. """ - http_options = _BaseCloudRedisRestTransport._BaseWaitOperation._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseWaitOperation._get_http_options() + ) request, metadata = self._interceptor.pre_wait_operation(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseWaitOperation._get_transcoded_request(http_options, request) + transcoded_request = ( + _BaseCloudRedisRestTransport._BaseWaitOperation._get_transcoded_request( + http_options, request + ) + ) - body = _BaseCloudRedisRestTransport._BaseWaitOperation._get_request_body_json(transcoded_request) + body = ( + _BaseCloudRedisRestTransport._BaseWaitOperation._get_request_body_json( + transcoded_request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseWaitOperation._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseWaitOperation._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, 
uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.WaitOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "WaitOperation", "httpRequest": http_request, @@ -2021,7 +2422,15 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._WaitOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = CloudRedisRestTransport._WaitOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2032,19 +2441,21 @@ def __call__(self, resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = self._interceptor.post_wait_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.WaitOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "WaitOperation", "httpResponse": http_response, @@ -2061,6 +2472,4 @@ def close(self): self._session.close() -__all__=( - 'CloudRedisRestTransport', -) +__all__ = ("CloudRedisRestTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py index e8ca3154862d..9d233635b95f 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py @@ -15,47 +15,45 @@ # import google.auth + try: - import aiohttp # type: ignore - from google.auth.aio.transport.sessions import AsyncAuthorizedSession # type: ignore - from google.api_core import rest_streaming_async # type: ignore - from google.api_core.operations_v1 import AsyncOperationsRestClient # type: ignore + import aiohttp # type: ignore + from google.api_core import rest_streaming_async # type: 
ignore + from google.api_core.operations_v1 import AsyncOperationsRestClient # type: ignore + from google.auth.aio.transport.sessions import ( + AsyncAuthorizedSession, # type: ignore + ) except ImportError as e: # pragma: NO COVER - raise ImportError("`rest_asyncio` transport requires the library to be installed with the `async_rest` extra. Install the library with the `async_rest` extra using `pip install google-cloud-redis[async_rest]`") from e + raise ImportError( + "`rest_asyncio` transport requires the library to be installed with the `async_rest` extra. Install the library with the `async_rest` extra using `pip install google-cloud-redis[async_rest]`" + ) from e -from google.auth.aio import credentials as ga_credentials_async # type: ignore +import dataclasses +import json # type: ignore +import logging +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import google.protobuf from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import operations_v1 -from google.cloud.location import locations_pb2 # type: ignore +from google.api_core import ( + gapic_v1, + operations_v1, + rest_helpers, + rest_streaming_async, # type: ignore +) from google.api_core import retry_async as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming_async # type: ignore -import google.protobuf - -from google.protobuf import json_format -from google.api_core import operations_v1 -from google.cloud.location import locations_pb2 # type: ignore - -import json # type: ignore -import dataclasses -from typing import Any, Dict, List, Callable, Tuple, Optional, Sequence, Union - - +from google.auth.aio import credentials as ga_credentials_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore # type: ignore from google.cloud.redis_v1.types import cloud_redis from google.longrunning import operations_pb2 # type: ignore - - -from 
.rest_base import _BaseCloudRedisRestTransport +from google.protobuf import json_format from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -import logging +from .rest_base import _BaseCloudRedisRestTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -137,7 +135,14 @@ async def post_update_instance(self, response): """ - async def pre_create_instance(self, request: cloud_redis.CreateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.CreateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + + async def pre_create_instance( + self, + request: cloud_redis.CreateInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.CreateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for create_instance Override in a subclass to manipulate the request or metadata @@ -145,7 +150,9 @@ async def pre_create_instance(self, request: cloud_redis.CreateInstanceRequest, """ return request, metadata - async def post_create_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + async def post_create_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for create_instance DEPRECATED. 
Please use the `post_create_instance_with_metadata` @@ -158,7 +165,11 @@ async def post_create_instance(self, response: operations_pb2.Operation) -> oper """ return response - async def post_create_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_create_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for create_instance Override in a subclass to read or manipulate the response or metadata after it @@ -173,7 +184,13 @@ async def post_create_instance_with_metadata(self, response: operations_pb2.Oper """ return response, metadata - async def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.DeleteInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + async def pre_delete_instance( + self, + request: cloud_redis.DeleteInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.DeleteInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_instance Override in a subclass to manipulate the request or metadata @@ -181,7 +198,9 @@ async def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, """ return request, metadata - async def post_delete_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + async def post_delete_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_instance DEPRECATED. 
Please use the `post_delete_instance_with_metadata` @@ -194,7 +213,11 @@ async def post_delete_instance(self, response: operations_pb2.Operation) -> oper """ return response - async def post_delete_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_delete_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for delete_instance Override in a subclass to read or manipulate the response or metadata after it @@ -209,7 +232,11 @@ async def post_delete_instance_with_metadata(self, response: operations_pb2.Oper """ return response, metadata - async def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.GetInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + async def pre_get_instance( + self, + request: cloud_redis.GetInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.GetInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_instance Override in a subclass to manipulate the request or metadata @@ -217,7 +244,9 @@ async def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metada """ return request, metadata - async def post_get_instance(self, response: cloud_redis.Instance) -> cloud_redis.Instance: + async def post_get_instance( + self, response: cloud_redis.Instance + ) -> cloud_redis.Instance: """Post-rpc interceptor for get_instance DEPRECATED. 
Please use the `post_get_instance_with_metadata` @@ -230,7 +259,11 @@ async def post_get_instance(self, response: cloud_redis.Instance) -> cloud_redis """ return response - async def post_get_instance_with_metadata(self, response: cloud_redis.Instance, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_get_instance_with_metadata( + self, + response: cloud_redis.Instance, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for get_instance Override in a subclass to read or manipulate the response or metadata after it @@ -245,7 +278,13 @@ async def post_get_instance_with_metadata(self, response: cloud_redis.Instance, """ return response, metadata - async def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + async def pre_list_instances( + self, + request: cloud_redis.ListInstancesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_instances Override in a subclass to manipulate the request or metadata @@ -253,7 +292,9 @@ async def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, me """ return request, metadata - async def post_list_instances(self, response: cloud_redis.ListInstancesResponse) -> cloud_redis.ListInstancesResponse: + async def post_list_instances( + self, response: cloud_redis.ListInstancesResponse + ) -> cloud_redis.ListInstancesResponse: """Post-rpc interceptor for list_instances DEPRECATED. 
Please use the `post_list_instances_with_metadata` @@ -266,7 +307,13 @@ async def post_list_instances(self, response: cloud_redis.ListInstancesResponse) """ return response - async def post_list_instances_with_metadata(self, response: cloud_redis.ListInstancesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_list_instances_with_metadata( + self, + response: cloud_redis.ListInstancesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Post-rpc interceptor for list_instances Override in a subclass to read or manipulate the response or metadata after it @@ -281,7 +328,13 @@ async def post_list_instances_with_metadata(self, response: cloud_redis.ListInst """ return response, metadata - async def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + async def pre_update_instance( + self, + request: cloud_redis.UpdateInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_redis.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for update_instance Override in a subclass to manipulate the request or metadata @@ -289,7 +342,9 @@ async def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, """ return request, metadata - async def post_update_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + async def post_update_instance( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: """Post-rpc interceptor for update_instance DEPRECATED. 
Please use the `post_update_instance_with_metadata` @@ -302,7 +357,11 @@ async def post_update_instance(self, response: operations_pb2.Operation) -> oper """ return response - async def post_update_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_update_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for update_instance Override in a subclass to read or manipulate the response or metadata after it @@ -318,8 +377,12 @@ async def post_update_instance_with_metadata(self, response: operations_pb2.Oper return response, metadata async def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_location Override in a subclass to manipulate the request or metadata @@ -339,8 +402,12 @@ async def post_get_location( return response async def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_locations Override in a subclass to manipulate the request or metadata @@ -360,8 
+427,12 @@ async def post_list_locations( return response async def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for cancel_operation Override in a subclass to manipulate the request or metadata @@ -369,9 +440,7 @@ async def pre_cancel_operation( """ return request, metadata - async def post_cancel_operation( - self, response: None - ) -> None: + async def post_cancel_operation(self, response: None) -> None: """Post-rpc interceptor for cancel_operation Override in a subclass to manipulate the response @@ -381,8 +450,12 @@ async def post_cancel_operation( return response async def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_operation Override in a subclass to manipulate the request or metadata @@ -390,9 +463,7 @@ async def pre_delete_operation( """ return request, metadata - async def post_delete_operation( - self, response: None - ) -> None: + async def post_delete_operation(self, response: None) -> None: """Post-rpc interceptor for delete_operation Override in a subclass to manipulate the response @@ -402,8 +473,12 @@ async def post_delete_operation( return response async def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, 
metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata @@ -423,8 +498,12 @@ async def post_get_operation( return response async def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_operations Override in a subclass to manipulate the request or metadata @@ -444,8 +523,12 @@ async def post_list_operations( return response async def pre_wait_operation( - self, request: operations_pb2.WaitOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.WaitOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + self, + request: operations_pb2.WaitOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.WaitOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for wait_operation Override in a subclass to manipulate the request or metadata @@ -471,6 +554,7 @@ class AsyncCloudRedisRestStub: _host: str _interceptor: AsyncCloudRedisRestInterceptor + class AsyncCloudRedisRestTransport(_BaseCloudRedisRestTransport): """Asynchronous REST backend transport for CloudRedis. 
@@ -502,38 +586,40 @@ class AsyncCloudRedisRestTransport(_BaseCloudRedisRestTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, - *, - host: str = 'redis.googleapis.com', - credentials: Optional[ga_credentials_async.Credentials] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - url_scheme: str = 'https', - interceptor: Optional[AsyncCloudRedisRestInterceptor] = None, - ) -> None: + + def __init__( + self, + *, + host: str = "redis.googleapis.com", + credentials: Optional[ga_credentials_async.Credentials] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + url_scheme: str = "https", + interceptor: Optional[AsyncCloudRedisRestInterceptor] = None, + ) -> None: """Instantiate the transport. - NOTE: This async REST transport functionality is currently in a beta - state (preview). We welcome your feedback via a GitHub issue in - this library's repository. Thank you! - - Args: - host (Optional[str]): - The hostname to connect to (default: 'redis.googleapis.com'). - credentials (Optional[google.auth.aio.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - url_scheme (str): the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - interceptor (Optional[AsyncCloudRedisRestInterceptor]): Interceptor used - to manipulate requests, request metadata, and responses. 
+ NOTE: This async REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to (default: 'redis.googleapis.com'). + credentials (Optional[google.auth.aio.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + url_scheme (str): the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + interceptor (Optional[AsyncCloudRedisRestInterceptor]): Interceptor used + to manipulate requests, request metadata, and responses. 
""" # Run the base constructor super().__init__( @@ -542,16 +628,18 @@ def __init__(self, client_info=client_info, always_use_jwt_access=False, url_scheme=url_scheme, - api_audience=None + api_audience=None, ) self._session = AsyncAuthorizedSession(self._credentials) # type: ignore self._interceptor = interceptor or AsyncCloudRedisRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = ( + None + ) def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { self.list_instances: self._wrap_method( self.list_instances, @@ -620,7 +708,9 @@ def _wrap_method(self, func, *args, **kwargs): kwargs["kind"] = self.kind return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - class _CreateInstance(_BaseCloudRedisRestTransport._BaseCreateInstance, AsyncCloudRedisRestStub): + class _CreateInstance( + _BaseCloudRedisRestTransport._BaseCreateInstance, AsyncCloudRedisRestStub + ): def __hash__(self): return hash("AsyncCloudRedisRestTransport.CreateInstance") @@ -632,27 +722,29 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - async def 
__call__(self, - request: cloud_redis.CreateInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + async def __call__( + self, + request: cloud_redis.CreateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the create instance method over HTTP. Args: @@ -675,32 +767,50 @@ async def __call__(self, """ - http_options = _BaseCloudRedisRestTransport._BaseCreateInstance._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseCreateInstance._get_http_options() + ) - request, metadata = await self._interceptor.pre_create_instance(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseCreateInstance._get_transcoded_request(http_options, request) + request, metadata = await self._interceptor.pre_create_instance( + request, metadata + ) + transcoded_request = _BaseCloudRedisRestTransport._BaseCreateInstance._get_transcoded_request( + http_options, request + ) - body = _BaseCloudRedisRestTransport._BaseCreateInstance._get_request_body_json(transcoded_request) + body = ( + _BaseCloudRedisRestTransport._BaseCreateInstance._get_request_body_json( + transcoded_request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseCreateInstance._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseCreateInstance._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = 
"{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.CreateInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "CreateInstance", "httpRequest": http_request, @@ -709,16 +819,28 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._CreateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = await AsyncCloudRedisRestTransport._CreateInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + raise core_exceptions.format_http_response_error( + response, method, request_url, payload + ) # type: ignore # Return the response resp = operations_pb2.Operation() @@ -727,20 +849,24 @@ async def __call__(self, json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_create_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = await self._interceptor.post_create_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = await self._interceptor.post_create_instance_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.create_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "CreateInstance", "metadata": http_response["headers"], @@ -750,7 +876,9 @@ async def __call__(self, return resp - class _DeleteInstance(_BaseCloudRedisRestTransport._BaseDeleteInstance, 
AsyncCloudRedisRestStub): + class _DeleteInstance( + _BaseCloudRedisRestTransport._BaseDeleteInstance, AsyncCloudRedisRestStub + ): def __hash__(self): return hash("AsyncCloudRedisRestTransport.DeleteInstance") @@ -762,26 +890,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: cloud_redis.DeleteInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + async def __call__( + self, + request: cloud_redis.DeleteInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the delete instance method over HTTP. 
Args: @@ -804,30 +934,44 @@ async def __call__(self, """ - http_options = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseDeleteInstance._get_http_options() + ) - request, metadata = await self._interceptor.pre_delete_instance(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_transcoded_request(http_options, request) + request, metadata = await self._interceptor.pre_delete_instance( + request, metadata + ) + transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseDeleteInstance._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "DeleteInstance", "httpRequest": http_request, @@ -836,16 +980,27 @@ async def __call__(self, ) # Send the request - response = 
await AsyncCloudRedisRestTransport._DeleteInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = await AsyncCloudRedisRestTransport._DeleteInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + raise core_exceptions.format_http_response_error( + response, method, request_url, payload + ) # type: ignore # Return the response resp = operations_pb2.Operation() @@ -854,20 +1009,24 @@ async def __call__(self, json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_delete_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = await self._interceptor.post_delete_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = await self._interceptor.post_delete_instance_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + 
"status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.delete_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "DeleteInstance", "metadata": http_response["headers"], @@ -877,7 +1036,9 @@ async def __call__(self, return resp - class _GetInstance(_BaseCloudRedisRestTransport._BaseGetInstance, AsyncCloudRedisRestStub): + class _GetInstance( + _BaseCloudRedisRestTransport._BaseGetInstance, AsyncCloudRedisRestStub + ): def __hash__(self): return hash("AsyncCloudRedisRestTransport.GetInstance") @@ -889,26 +1050,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: cloud_redis.GetInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> cloud_redis.Instance: + async def __call__( + self, + request: cloud_redis.GetInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.Instance: r"""Call the get instance method over HTTP. Args: @@ -928,30 +1091,46 @@ async def __call__(self, A Memorystore for Redis instance. 
""" - http_options = _BaseCloudRedisRestTransport._BaseGetInstance._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseGetInstance._get_http_options() + ) - request, metadata = await self._interceptor.pre_get_instance(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseGetInstance._get_transcoded_request(http_options, request) + request, metadata = await self._interceptor.pre_get_instance( + request, metadata + ) + transcoded_request = ( + _BaseCloudRedisRestTransport._BaseGetInstance._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseGetInstance._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetInstance", "httpRequest": http_request, @@ -960,16 +1139,27 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._GetInstance._get_response(self._host, 
metadata, query_params, self._session, timeout, transcoded_request) + response = await AsyncCloudRedisRestTransport._GetInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + raise core_exceptions.format_http_response_error( + response, method, request_url, payload + ) # type: ignore # Return the response resp = cloud_redis.Instance() @@ -978,20 +1168,24 @@ async def __call__(self, json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_get_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = await self._interceptor.post_get_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = await self._interceptor.post_get_instance_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = cloud_redis.Instance.to_json(response) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for 
google.cloud.redis_v1.CloudRedisAsyncClient.get_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetInstance", "metadata": http_response["headers"], @@ -1001,7 +1195,9 @@ async def __call__(self, return resp - class _ListInstances(_BaseCloudRedisRestTransport._BaseListInstances, AsyncCloudRedisRestStub): + class _ListInstances( + _BaseCloudRedisRestTransport._BaseListInstances, AsyncCloudRedisRestStub + ): def __hash__(self): return hash("AsyncCloudRedisRestTransport.ListInstances") @@ -1013,26 +1209,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: cloud_redis.ListInstancesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> cloud_redis.ListInstancesResponse: + async def __call__( + self, + request: cloud_redis.ListInstancesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.ListInstancesResponse: r"""Call the list instances method over HTTP. 
Args: @@ -1054,30 +1252,46 @@ async def __call__(self, """ - http_options = _BaseCloudRedisRestTransport._BaseListInstances._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseListInstances._get_http_options() + ) - request, metadata = await self._interceptor.pre_list_instances(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseListInstances._get_transcoded_request(http_options, request) + request, metadata = await self._interceptor.pre_list_instances( + request, metadata + ) + transcoded_request = ( + _BaseCloudRedisRestTransport._BaseListInstances._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseListInstances._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseListInstances._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListInstances", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListInstances", "httpRequest": http_request, @@ -1086,16 +1300,27 @@ async def __call__(self, ) # Send the request - response = 
await AsyncCloudRedisRestTransport._ListInstances._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = await AsyncCloudRedisRestTransport._ListInstances._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + raise core_exceptions.format_http_response_error( + response, method, request_url, payload + ) # type: ignore # Return the response resp = cloud_redis.ListInstancesResponse() @@ -1104,20 +1329,26 @@ async def __call__(self, json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_list_instances(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = await self._interceptor.post_list_instances_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = await self._interceptor.post_list_instances_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: - response_payload = cloud_redis.ListInstancesResponse.to_json(response) + response_payload = cloud_redis.ListInstancesResponse.to_json( + response + ) except: response_payload = None http_response = { "payload": response_payload, - "headers": 
dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.list_instances", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListInstances", "metadata": http_response["headers"], @@ -1127,7 +1358,9 @@ async def __call__(self, return resp - class _UpdateInstance(_BaseCloudRedisRestTransport._BaseUpdateInstance, AsyncCloudRedisRestStub): + class _UpdateInstance( + _BaseCloudRedisRestTransport._BaseUpdateInstance, AsyncCloudRedisRestStub + ): def __hash__(self): return hash("AsyncCloudRedisRestTransport.UpdateInstance") @@ -1139,27 +1372,29 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - async def __call__(self, - request: cloud_redis.UpdateInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + async def __call__( + self, + request: cloud_redis.UpdateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the update instance method over HTTP. 
Args: @@ -1182,32 +1417,50 @@ async def __call__(self, """ - http_options = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseUpdateInstance._get_http_options() + ) - request, metadata = await self._interceptor.pre_update_instance(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_transcoded_request(http_options, request) + request, metadata = await self._interceptor.pre_update_instance( + request, metadata + ) + transcoded_request = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_transcoded_request( + http_options, request + ) - body = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_request_body_json(transcoded_request) + body = ( + _BaseCloudRedisRestTransport._BaseUpdateInstance._get_request_body_json( + transcoded_request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseUpdateInstance._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for 
google.cloud.redis_v1.CloudRedisClient.UpdateInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "UpdateInstance", "httpRequest": http_request, @@ -1216,16 +1469,28 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._UpdateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = await AsyncCloudRedisRestTransport._UpdateInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + raise core_exceptions.format_http_response_error( + response, method, request_url, payload + ) # type: ignore # Return the response resp = operations_pb2.Operation() @@ -1234,20 +1499,24 @@ async def __call__(self, json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_update_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = await self._interceptor.post_update_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + resp, _ = await self._interceptor.post_update_instance_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): 
# pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.update_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "UpdateInstance", "metadata": http_response["headers"], @@ -1267,87 +1536,93 @@ def operations_client(self) -> AsyncOperationsRestClient: # Only create a new client if we do not already have one. if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ + "google.longrunning.Operations.CancelOperation": [ { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", }, ], - 'google.longrunning.Operations.DeleteOperation': [ + "google.longrunning.Operations.DeleteOperation": [ { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", }, ], - 'google.longrunning.Operations.GetOperation': [ + "google.longrunning.Operations.GetOperation": [ { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", }, ], - 'google.longrunning.Operations.ListOperations': [ + "google.longrunning.Operations.ListOperations": [ { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", }, ], - 'google.longrunning.Operations.WaitOperation': [ + "google.longrunning.Operations.WaitOperation": [ { - 'method': 
'post', - 'uri': '/v2/{name=projects/*/locations/*/operations/*}:wait', - 'body': '*', + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/operations/*}:wait", + "body": "*", }, ], } rest_transport = operations_v1.AsyncOperationsRestTransport( # type: ignore - host=self._host, - # use the credentials which are saved - credentials=self._credentials, # type: ignore - http_options=http_options, - path_prefix="v1" + host=self._host, + # use the credentials which are saved + credentials=self._credentials, # type: ignore + http_options=http_options, + path_prefix="v1", ) - self._operations_client = AsyncOperationsRestClient(transport=rest_transport) + self._operations_client = AsyncOperationsRestClient( + transport=rest_transport + ) # Return the client from cache. return self._operations_client @property - def create_instance(self) -> Callable[ - [cloud_redis.CreateInstanceRequest], - operations_pb2.Operation]: + def create_instance( + self, + ) -> Callable[[cloud_redis.CreateInstanceRequest], operations_pb2.Operation]: return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore @property - def delete_instance(self) -> Callable[ - [cloud_redis.DeleteInstanceRequest], - operations_pb2.Operation]: + def delete_instance( + self, + ) -> Callable[[cloud_redis.DeleteInstanceRequest], operations_pb2.Operation]: return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore @property - def get_instance(self) -> Callable[ - [cloud_redis.GetInstanceRequest], - cloud_redis.Instance]: + def get_instance( + self, + ) -> Callable[[cloud_redis.GetInstanceRequest], cloud_redis.Instance]: return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore @property - def list_instances(self) -> Callable[ - [cloud_redis.ListInstancesRequest], - cloud_redis.ListInstancesResponse]: + def list_instances( + self, + ) -> Callable[ + [cloud_redis.ListInstancesRequest], cloud_redis.ListInstancesResponse + ]: 
return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore @property - def update_instance(self) -> Callable[ - [cloud_redis.UpdateInstanceRequest], - operations_pb2.Operation]: + def update_instance( + self, + ) -> Callable[[cloud_redis.UpdateInstanceRequest], operations_pb2.Operation]: return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore @property def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - class _GetLocation(_BaseCloudRedisRestTransport._BaseGetLocation, AsyncCloudRedisRestStub): + class _GetLocation( + _BaseCloudRedisRestTransport._BaseGetLocation, AsyncCloudRedisRestStub + ): def __hash__(self): return hash("AsyncCloudRedisRestTransport.GetLocation") @@ -1359,27 +1634,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.Location: - + async def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: r"""Call the 
get location method over HTTP. Args: @@ -1397,30 +1673,46 @@ async def __call__(self, locations_pb2.Location: Response from GetLocation method. """ - http_options = _BaseCloudRedisRestTransport._BaseGetLocation._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseGetLocation._get_http_options() + ) - request, metadata = await self._interceptor.pre_get_location(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) + request, metadata = await self._interceptor.pre_get_location( + request, metadata + ) + transcoded_request = ( + _BaseCloudRedisRestTransport._BaseGetLocation._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseGetLocation._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetLocation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetLocation", "httpRequest": http_request, @@ 
-1429,34 +1721,47 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = await AsyncCloudRedisRestTransport._GetLocation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + raise core_exceptions.format_http_response_error( + response, method, request_url, payload + ) # type: ignore content = await response.read() resp = locations_pb2.Location() resp = json_format.Parse(content, resp) resp = await self._interceptor.post_get_location(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.GetLocation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetLocation", "httpResponse": http_response, @@ -1467,9 +1772,11 @@ async def __call__(self, @property 
def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - class _ListLocations(_BaseCloudRedisRestTransport._BaseListLocations, AsyncCloudRedisRestStub): + class _ListLocations( + _BaseCloudRedisRestTransport._BaseListLocations, AsyncCloudRedisRestStub + ): def __hash__(self): return hash("AsyncCloudRedisRestTransport.ListLocations") @@ -1481,27 +1788,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.ListLocationsResponse: - + async def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: r"""Call the list locations method over HTTP. Args: @@ -1519,30 +1827,46 @@ async def __call__(self, locations_pb2.ListLocationsResponse: Response from ListLocations method. 
""" - http_options = _BaseCloudRedisRestTransport._BaseListLocations._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseListLocations._get_http_options() + ) - request, metadata = await self._interceptor.pre_list_locations(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseListLocations._get_transcoded_request(http_options, request) + request, metadata = await self._interceptor.pre_list_locations( + request, metadata + ) + transcoded_request = ( + _BaseCloudRedisRestTransport._BaseListLocations._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseListLocations._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseListLocations._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListLocations", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListLocations", "httpRequest": http_request, @@ -1551,34 +1875,47 @@ async def __call__(self, ) # Send the request - response = await 
AsyncCloudRedisRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = await AsyncCloudRedisRestTransport._ListLocations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + raise core_exceptions.format_http_response_error( + response, method, request_url, payload + ) # type: ignore content = await response.read() resp = locations_pb2.ListLocationsResponse() resp = json_format.Parse(content, resp) resp = await self._interceptor.post_list_locations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.ListLocations", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListLocations", "httpResponse": http_response, @@ -1589,9 +1926,11 @@ async def __call__(self, @property def cancel_operation(self): - return 
self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - class _CancelOperation(_BaseCloudRedisRestTransport._BaseCancelOperation, AsyncCloudRedisRestStub): + class _CancelOperation( + _BaseCloudRedisRestTransport._BaseCancelOperation, AsyncCloudRedisRestStub + ): def __hash__(self): return hash("AsyncCloudRedisRestTransport.CancelOperation") @@ -1603,27 +1942,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - + async def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Call the cancel operation method over HTTP. Args: @@ -1638,30 +1978,42 @@ async def __call__(self, be of type `bytes`. 
""" - http_options = _BaseCloudRedisRestTransport._BaseCancelOperation._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseCancelOperation._get_http_options() + ) - request, metadata = await self._interceptor.pre_cancel_operation(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) + request, metadata = await self._interceptor.pre_cancel_operation( + request, metadata + ) + transcoded_request = _BaseCloudRedisRestTransport._BaseCancelOperation._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + query_params = _BaseCloudRedisRestTransport._BaseCancelOperation._get_query_params_json( + transcoded_request + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.CancelOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "CancelOperation", "httpRequest": http_request, @@ -1670,24 +2022,39 @@ async def __call__(self, ) # Send the request - response = await 
AsyncCloudRedisRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = ( + await AsyncCloudRedisRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + raise core_exceptions.format_http_response_error( + response, method, request_url, payload + ) # type: ignore return await self._interceptor.post_cancel_operation(None) @property def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - class _DeleteOperation(_BaseCloudRedisRestTransport._BaseDeleteOperation, AsyncCloudRedisRestStub): + class _DeleteOperation( + _BaseCloudRedisRestTransport._BaseDeleteOperation, AsyncCloudRedisRestStub + ): def __hash__(self): return hash("AsyncCloudRedisRestTransport.DeleteOperation") @@ -1699,27 +2066,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = 
"application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - + async def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Call the delete operation method over HTTP. Args: @@ -1734,30 +2102,42 @@ async def __call__(self, be of type `bytes`. """ - http_options = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseDeleteOperation._get_http_options() + ) - request, metadata = await self._interceptor.pre_delete_operation(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) + request, metadata = await self._interceptor.pre_delete_operation( + request, metadata + ) + transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + query_params = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_query_params_json( + transcoded_request + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO 
COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "DeleteOperation", "httpRequest": http_request, @@ -1766,24 +2146,39 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = ( + await AsyncCloudRedisRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + raise core_exceptions.format_http_response_error( + response, method, request_url, payload + ) # type: ignore return await self._interceptor.post_delete_operation(None) @property def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - class _GetOperation(_BaseCloudRedisRestTransport._BaseGetOperation, AsyncCloudRedisRestStub): + class _GetOperation( + _BaseCloudRedisRestTransport._BaseGetOperation, AsyncCloudRedisRestStub + ): def __hash__(self): return hash("AsyncCloudRedisRestTransport.GetOperation") @@ -1795,27 +2190,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) 
-> operations_pb2.Operation: - + async def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. Args: @@ -1833,30 +2229,46 @@ async def __call__(self, operations_pb2.Operation: Response from GetOperation method. """ - http_options = _BaseCloudRedisRestTransport._BaseGetOperation._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseGetOperation._get_http_options() + ) - request, metadata = await self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + request, metadata = await self._interceptor.pre_get_operation( + request, metadata + ) + transcoded_request = ( + _BaseCloudRedisRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + 
"requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetOperation", "httpRequest": http_request, @@ -1865,34 +2277,47 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = await AsyncCloudRedisRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + raise core_exceptions.format_http_response_error( + response, method, request_url, payload + ) # type: ignore content = await response.read() resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = await self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), 
"status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.GetOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetOperation", "httpResponse": http_response, @@ -1903,9 +2328,11 @@ async def __call__(self, @property def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - class _ListOperations(_BaseCloudRedisRestTransport._BaseListOperations, AsyncCloudRedisRestStub): + class _ListOperations( + _BaseCloudRedisRestTransport._BaseListOperations, AsyncCloudRedisRestStub + ): def __hash__(self): return hash("AsyncCloudRedisRestTransport.ListOperations") @@ -1917,27 +2344,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - + async def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: r"""Call the list operations method over HTTP. 
Args: @@ -1955,30 +2383,44 @@ async def __call__(self, operations_pb2.ListOperationsResponse: Response from ListOperations method. """ - http_options = _BaseCloudRedisRestTransport._BaseListOperations._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseListOperations._get_http_options() + ) - request, metadata = await self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + request, metadata = await self._interceptor.pre_list_operations( + request, metadata + ) + transcoded_request = _BaseCloudRedisRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListOperations", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListOperations", "httpRequest": http_request, 
@@ -1987,34 +2429,47 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = await AsyncCloudRedisRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + raise core_exceptions.format_http_response_error( + response, method, request_url, payload + ) # type: ignore content = await response.read() resp = operations_pb2.ListOperationsResponse() resp = json_format.Parse(content, resp) resp = await self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.ListOperations", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListOperations", "httpResponse": http_response, @@ -2025,9 +2480,11 @@ 
async def __call__(self, @property def wait_operation(self): - return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore + return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore - class _WaitOperation(_BaseCloudRedisRestTransport._BaseWaitOperation, AsyncCloudRedisRestStub): + class _WaitOperation( + _BaseCloudRedisRestTransport._BaseWaitOperation, AsyncCloudRedisRestStub + ): def __hash__(self): return hash("AsyncCloudRedisRestTransport.WaitOperation") @@ -2039,28 +2496,29 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - async def __call__(self, - request: operations_pb2.WaitOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - + async def __call__( + self, + request: operations_pb2.WaitOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the wait operation method over HTTP. Args: @@ -2078,32 +2536,52 @@ async def __call__(self, operations_pb2.Operation: Response from WaitOperation method. 
""" - http_options = _BaseCloudRedisRestTransport._BaseWaitOperation._get_http_options() + http_options = ( + _BaseCloudRedisRestTransport._BaseWaitOperation._get_http_options() + ) - request, metadata = await self._interceptor.pre_wait_operation(request, metadata) - transcoded_request = _BaseCloudRedisRestTransport._BaseWaitOperation._get_transcoded_request(http_options, request) + request, metadata = await self._interceptor.pre_wait_operation( + request, metadata + ) + transcoded_request = ( + _BaseCloudRedisRestTransport._BaseWaitOperation._get_transcoded_request( + http_options, request + ) + ) - body = _BaseCloudRedisRestTransport._BaseWaitOperation._get_request_body_json(transcoded_request) + body = ( + _BaseCloudRedisRestTransport._BaseWaitOperation._get_request_body_json( + transcoded_request + ) + ) # Jsonify the query params - query_params = _BaseCloudRedisRestTransport._BaseWaitOperation._get_query_params_json(transcoded_request) + query_params = ( + _BaseCloudRedisRestTransport._BaseWaitOperation._get_query_params_json( + transcoded_request + ) + ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.WaitOperation", - extra = { + extra={ "serviceName": 
"google.cloud.redis.v1.CloudRedis", "rpcName": "WaitOperation", "httpRequest": http_request, @@ -2112,34 +2590,48 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._WaitOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = await AsyncCloudRedisRestTransport._WaitOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + raise core_exceptions.format_http_response_error( + response, method, request_url, payload + ) # type: ignore content = await response.read() resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = await self._interceptor.post_wait_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.WaitOperation", - extra = { + extra={ "serviceName": 
"google.cloud.redis.v1.CloudRedis", "rpcName": "WaitOperation", "httpResponse": http_response, diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py index 85b3522cb282..8caeb7885666 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py @@ -14,19 +14,16 @@ # limitations under the License. # import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.cloud.location import locations_pb2 # type: ignore -from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO - import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - +from google.api_core import gapic_v1, path_template +from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.types import cloud_redis from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format + +from .base import DEFAULT_CLIENT_INFO, CloudRedisTransport class _BaseCloudRedisRestTransport(CloudRedisTransport): @@ -42,14 +39,16 @@ class _BaseCloudRedisRestTransport(CloudRedisTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, - host: str = 'redis.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = 
"redis.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): @@ -73,7 +72,9 @@ def __init__(self, *, # Run the base constructor maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER url_match_items = maybe_url_match.groupdict() @@ -84,27 +85,33 @@ def __init__(self, *, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience + api_audience=api_audience, ) class _BaseCreateInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "instanceId" : "", } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "instanceId": "", + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/instances', - 'body': 'instance', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/instances", + "body": "instance", + }, ] return http_options @@ -119,17 +126,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + 
transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseCloudRedisRestTransport._BaseCreateInstance._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseCloudRedisRestTransport._BaseCreateInstance._get_unset_required_fields( + query_params + ) + ) return query_params @@ -137,19 +150,23 @@ class _BaseDeleteInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/instances/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/instances/*}", + }, ] return http_options @@ -161,11 +178,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseCloudRedisRestTransport._BaseDeleteInstance._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + 
use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseCloudRedisRestTransport._BaseDeleteInstance._get_unset_required_fields( + query_params + ) + ) return query_params @@ -173,19 +196,23 @@ class _BaseGetInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/instances/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/instances/*}", + }, ] return http_options @@ -197,11 +224,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseCloudRedisRestTransport._BaseGetInstance._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseCloudRedisRestTransport._BaseGetInstance._get_unset_required_fields( + query_params + ) + ) return query_params @@ -209,19 +242,23 @@ class _BaseListInstances: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} @classmethod def _get_unset_required_fields(cls, 
message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/instances', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/instances", + }, ] return http_options @@ -233,11 +270,17 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseCloudRedisRestTransport._BaseListInstances._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseCloudRedisRestTransport._BaseListInstances._get_unset_required_fields( + query_params + ) + ) return query_params @@ -245,20 +288,26 @@ class _BaseUpdateInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } @classmethod def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{instance.name=projects/*/locations/*/instances/*}', - 'body': 'instance', - }, + http_options: List[Dict[str, str]] 
= [ + { + "method": "patch", + "uri": "/v1/{instance.name=projects/*/locations/*/instances/*}", + "body": "instance", + }, ] return http_options @@ -273,17 +322,23 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], use_integers_for_enums=False ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(_BaseCloudRedisRestTransport._BaseUpdateInstance._get_unset_required_fields(query_params)) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseCloudRedisRestTransport._BaseUpdateInstance._get_unset_required_fields( + query_params + ) + ) return query_params @@ -293,23 +348,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseListLocations: @@ -318,23 +373,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def 
_get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*}/locations', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseCancelOperation: @@ -343,23 +398,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseDeleteOperation: @@ -368,23 +423,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': 
'/v1/{name=projects/*/locations/*/operations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseGetOperation: @@ -393,23 +448,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseListOperations: @@ -418,23 +473,23 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", 
+ "uri": "/v1/{name=projects/*/locations/*}/operations", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseWaitOperation: @@ -443,31 +498,30 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/operations/*}:wait', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/operations/*}:wait", + "body": "*", + }, ] return http_options @staticmethod def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = path_template.transcode(http_options, **request_kwargs) return transcoded_request @staticmethod def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) + body = json.dumps(transcoded_request["body"]) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params -__all__=( - '_BaseCloudRedisRestTransport', -) +__all__ = ("_BaseCloudRedisRestTransport",) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/__init__.py 
b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/__init__.py index 1e420395cc1d..3828d7a37c91 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/__init__.py @@ -31,18 +31,18 @@ ) __all__ = ( - 'CreateInstanceRequest', - 'DeleteInstanceRequest', - 'GetInstanceRequest', - 'Instance', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'MaintenancePolicy', - 'MaintenanceSchedule', - 'NodeInfo', - 'OperationMetadata', - 'PersistenceConfig', - 'TlsCertificate', - 'UpdateInstanceRequest', - 'WeeklyMaintenanceWindow', + "CreateInstanceRequest", + "DeleteInstanceRequest", + "GetInstanceRequest", + "Instance", + "ListInstancesRequest", + "ListInstancesResponse", + "MaintenancePolicy", + "MaintenanceSchedule", + "NodeInfo", + "OperationMetadata", + "PersistenceConfig", + "TlsCertificate", + "UpdateInstanceRequest", + "WeeklyMaintenanceWindow", ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/cloud_redis.py index 8022b120202a..fe8f93d01ead 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/cloud_redis.py @@ -17,32 +17,30 @@ from typing import MutableMapping, MutableSequence -import proto # type: ignore - import google.protobuf.duration_pb2 as duration_pb2 # type: ignore import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore import google.type.dayofweek_pb2 as dayofweek_pb2 # type: ignore import google.type.timeofday_pb2 as timeofday_pb2 
# type: ignore - +import proto # type: ignore __protobuf__ = proto.module( - package='google.cloud.redis.v1', + package="google.cloud.redis.v1", manifest={ - 'NodeInfo', - 'Instance', - 'PersistenceConfig', - 'MaintenancePolicy', - 'WeeklyMaintenanceWindow', - 'MaintenanceSchedule', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'GetInstanceRequest', - 'CreateInstanceRequest', - 'UpdateInstanceRequest', - 'DeleteInstanceRequest', - 'OperationMetadata', - 'TlsCertificate', + "NodeInfo", + "Instance", + "PersistenceConfig", + "MaintenancePolicy", + "WeeklyMaintenanceWindow", + "MaintenanceSchedule", + "ListInstancesRequest", + "ListInstancesResponse", + "GetInstanceRequest", + "CreateInstanceRequest", + "UpdateInstanceRequest", + "DeleteInstanceRequest", + "OperationMetadata", + "TlsCertificate", }, ) @@ -253,6 +251,7 @@ class Instance(proto.Message): Optional. The available maintenance versions that an instance could update to. """ + class State(proto.Enum): r"""Represents the different states of a Redis instance. @@ -284,6 +283,7 @@ class State(proto.Enum): Redis instance is failing over (availability may be affected). """ + STATE_UNSPECIFIED = 0 CREATING = 1 READY = 2 @@ -305,6 +305,7 @@ class Tier(proto.Enum): STANDARD_HA (3): STANDARD_HA tier: highly available primary/replica instances """ + TIER_UNSPECIFIED = 0 BASIC = 1 STANDARD_HA = 3 @@ -324,6 +325,7 @@ class ConnectMode(proto.Enum): access provides an IP address range for multiple Google Cloud services, including Memorystore. """ + CONNECT_MODE_UNSPECIFIED = 0 DIRECT_PEERING = 1 PRIVATE_SERVICE_ACCESS = 2 @@ -340,6 +342,7 @@ class TransitEncryptionMode(proto.Enum): DISABLED (2): TLS is disabled for the instance. """ + TRANSIT_ENCRYPTION_MODE_UNSPECIFIED = 0 SERVER_AUTHENTICATION = 1 DISABLED = 2 @@ -360,6 +363,7 @@ class ReadReplicasMode(proto.Enum): and the instance can scale up and down the number of replicas. Not valid for basic tier. 
""" + READ_REPLICAS_MODE_UNSPECIFIED = 0 READ_REPLICAS_DISABLED = 1 READ_REPLICAS_ENABLED = 2 @@ -375,6 +379,7 @@ class SuspensionReason(proto.Enum): Something wrong with the CMEK key provided by customer. """ + SUSPENSION_REASON_UNSPECIFIED = 0 CUSTOMER_MANAGED_KEY_ISSUE = 1 @@ -468,34 +473,34 @@ class SuspensionReason(proto.Enum): proto.BOOL, number=23, ) - server_ca_certs: MutableSequence['TlsCertificate'] = proto.RepeatedField( + server_ca_certs: MutableSequence["TlsCertificate"] = proto.RepeatedField( proto.MESSAGE, number=25, - message='TlsCertificate', + message="TlsCertificate", ) transit_encryption_mode: TransitEncryptionMode = proto.Field( proto.ENUM, number=26, enum=TransitEncryptionMode, ) - maintenance_policy: 'MaintenancePolicy' = proto.Field( + maintenance_policy: "MaintenancePolicy" = proto.Field( proto.MESSAGE, number=27, - message='MaintenancePolicy', + message="MaintenancePolicy", ) - maintenance_schedule: 'MaintenanceSchedule' = proto.Field( + maintenance_schedule: "MaintenanceSchedule" = proto.Field( proto.MESSAGE, number=28, - message='MaintenanceSchedule', + message="MaintenanceSchedule", ) replica_count: int = proto.Field( proto.INT32, number=31, ) - nodes: MutableSequence['NodeInfo'] = proto.RepeatedField( + nodes: MutableSequence["NodeInfo"] = proto.RepeatedField( proto.MESSAGE, number=32, - message='NodeInfo', + message="NodeInfo", ) read_endpoint: str = proto.Field( proto.STRING, @@ -514,10 +519,10 @@ class SuspensionReason(proto.Enum): proto.STRING, number=36, ) - persistence_config: 'PersistenceConfig' = proto.Field( + persistence_config: "PersistenceConfig" = proto.Field( proto.MESSAGE, number=37, - message='PersistenceConfig', + message="PersistenceConfig", ) suspension_reasons: MutableSequence[SuspensionReason] = proto.RepeatedField( proto.ENUM, @@ -559,6 +564,7 @@ class PersistenceConfig(proto.Message): future snapshots will be aligned. If not provided, the current time will be used. 
""" + class PersistenceMode(proto.Enum): r"""Available Persistence modes. @@ -571,6 +577,7 @@ class PersistenceMode(proto.Enum): RDB (2): RDB based Persistence is enabled. """ + PERSISTENCE_MODE_UNSPECIFIED = 0 DISABLED = 1 RDB = 2 @@ -590,6 +597,7 @@ class SnapshotPeriod(proto.Enum): TWENTY_FOUR_HOURS (6): Snapshot every 24 hours. """ + SNAPSHOT_PERIOD_UNSPECIFIED = 0 ONE_HOUR = 3 SIX_HOURS = 4 @@ -652,10 +660,12 @@ class MaintenancePolicy(proto.Message): proto.STRING, number=3, ) - weekly_maintenance_window: MutableSequence['WeeklyMaintenanceWindow'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='WeeklyMaintenanceWindow', + weekly_maintenance_window: MutableSequence["WeeklyMaintenanceWindow"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=4, + message="WeeklyMaintenanceWindow", + ) ) @@ -801,10 +811,10 @@ class ListInstancesResponse(proto.Message): def raw_page(self): return self - instances: MutableSequence['Instance'] = proto.RepeatedField( + instances: MutableSequence["Instance"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='Instance', + message="Instance", ) next_page_token: str = proto.Field( proto.STRING, @@ -863,10 +873,10 @@ class CreateInstanceRequest(proto.Message): proto.STRING, number=2, ) - instance: 'Instance' = proto.Field( + instance: "Instance" = proto.Field( proto.MESSAGE, number=3, - message='Instance', + message="Instance", ) @@ -896,10 +906,10 @@ class UpdateInstanceRequest(proto.Message): number=1, message=field_mask_pb2.FieldMask, ) - instance: 'Instance' = proto.Field( + instance: "Instance" = proto.Field( proto.MESSAGE, number=2, - message='Instance', + message="Instance", ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py index abaab5a4121d..3b773ab8cbd9 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py +++ 
b/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py @@ -17,9 +17,8 @@ import pathlib import re import shutil - -from typing import Dict, List import warnings +from typing import Dict, List import nox @@ -154,7 +153,8 @@ def lint(session): # 2. Check formatting session.run( - "ruff", "format", + "ruff", + "format", "--check", f"--target-version=py{ALL_PYTHON[0].replace('.', '')}", "--line-length=88", @@ -167,12 +167,15 @@ def lint(session): @nox.session(python=DEFAULT_PYTHON_VERSION) def blacken(session): """(Deprecated) Legacy session. Please use 'nox -s format'.""" - session.log("WARNING: The 'blacken' session is deprecated and will be removed in a future release. Please use 'nox -s format' in the future.") + session.log( + "WARNING: The 'blacken' session is deprecated and will be removed in a future release. Please use 'nox -s format' in the future." + ) # Just run the ruff formatter (keeping legacy behavior of only formatting, not sorting imports) session.install(RUFF_VERSION) session.run( - "ruff", "format", + "ruff", + "format", f"--target-version=py{ALL_PYTHON[0].replace('.', '')}", "--line-length=88", *LINT_PATHS, @@ -191,8 +194,10 @@ def format(session): # check --select I: Enables strict import sorting # --fix: Applies the changes automatically session.run( - "ruff", "check", - "--select", "I", + "ruff", + "check", + "--select", + "I", "--fix", f"--target-version=py{ALL_PYTHON[0].replace('.', '')}", "--line-length=88", # Standard Black line length @@ -201,7 +206,8 @@ def format(session): # 3. 
Run Ruff to format code session.run( - "ruff", "format", + "ruff", + "format", f"--target-version=py{ALL_PYTHON[0].replace('.', '')}", "--line-length=88", # Standard Black line length *LINT_PATHS, @@ -386,8 +392,10 @@ def docs(session): "sphinx-build", "-T", # show full traceback on exception "-N", # no colors - "-b", "html", # builder - "-d", os.path.join("docs", "_build", "doctrees", ""), # cache directory + "-b", + "html", # builder + "-d", + os.path.join("docs", "_build", "doctrees", ""), # cache directory # paths to build: os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py index 6cb8469248af..4c2cef2c45cf 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py @@ -60,4 +60,5 @@ async def sample_create_instance(): # Handle the response print(response) + # [END redis_v1_generated_CloudRedis_CreateInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py index ea546b2795ac..d52c5aa49358 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py @@ -60,4 +60,5 @@ def sample_create_instance(): # Handle the response print(response) + # [END redis_v1_generated_CloudRedis_CreateInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py index 45dcc1896a8d..75706750f56d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py @@ -53,4 +53,5 @@ async def sample_delete_instance(): # Handle the response print(response) + # [END redis_v1_generated_CloudRedis_DeleteInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py index 610246d96510..6aafcb787ba5 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py @@ -53,4 +53,5 @@ def sample_delete_instance(): # Handle the response print(response) + # [END redis_v1_generated_CloudRedis_DeleteInstance_sync] diff --git 
a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py index 241282db64fb..5b3e2c759b9f 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py @@ -49,4 +49,5 @@ async def sample_get_instance(): # Handle the response print(response) + # [END redis_v1_generated_CloudRedis_GetInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py index 5de5dd3bba9f..6c3054d1b6d8 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py @@ -49,4 +49,5 @@ def sample_get_instance(): # Handle the response print(response) + # [END redis_v1_generated_CloudRedis_GetInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py index 67c9a3c86e46..0e2bb45a0bf9 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py @@ -50,4 +50,5 @@ async def sample_list_instances(): async for response in page_result: print(response) + # [END redis_v1_generated_CloudRedis_ListInstances_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py index 2922fb554895..99b1b531b505 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py @@ -50,4 +50,5 @@ def sample_list_instances(): for response in page_result: print(response) + # [END redis_v1_generated_CloudRedis_ListInstances_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py index 4243eca30122..4d4c30233a22 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py @@ -58,4 +58,5 @@ async def sample_update_instance(): # Handle 
the response print(response) + # [END redis_v1_generated_CloudRedis_UpdateInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py index b5ae40fbab41..28d56e4bb691 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py @@ -58,4 +58,5 @@ def sample_update_instance(): # Handle the response print(response) + # [END redis_v1_generated_CloudRedis_UpdateInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py index e121207415e7..1ecce6251b63 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py @@ -17,20 +17,20 @@ import os import re -import setuptools # type: ignore +import setuptools # type: ignore package_root = os.path.abspath(os.path.dirname(__file__)) -name = 'google-cloud-redis' +name = "google-cloud-redis" description = "Google Cloud Redis API client library" version = None -with open(os.path.join(package_root, 'google/cloud/redis/gapic_version.py')) as fp: +with open(os.path.join(package_root, "google/cloud/redis/gapic_version.py")) as fp: version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) + assert len(version_candidates) == 1 version = version_candidates[0] if version[0] == "0": @@ -52,7 +52,7 @@ extras = { "async_rest": [ 
"google-api-core[grpc] >= 2.21.0, < 3.0.0", - "google-auth[aiohttp] >= 2.35.0, <3.0.0" + "google-auth[aiohttp] >= 2.35.0, <3.0.0", ], } url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis" diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/__init__.py index 191773d5572d..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/__init__.py index 191773d5572d..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/__init__.py index 191773d5572d..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/__init__.py index 191773d5572d..cbf94b283c70 100755 
--- a/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/__init__.py @@ -1,4 +1,3 @@ - # -*- coding: utf-8 -*- # Copyright 2025 Google LLC # diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/test_cloud_redis.py index 526046d104ec..63ceb01fe681 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -14,6 +14,7 @@ # limitations under the License. # import os + # try/except added for compatibility with python < 3.8 try: from unittest import mock @@ -21,55 +22,37 @@ except ImportError: # pragma: NO COVER import mock -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format import json import math +from collections.abc import AsyncIterable, Iterable, Mapping, Sequence + +import grpc import pytest -from collections.abc import Sequence, Mapping from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule +from google.protobuf import json_format +from grpc.experimental import aio from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule + try: import aiohttp # type: ignore - from google.auth.aio.transport.sessions import AsyncAuthorizedSession from google.api_core.operations_v1 import AsyncOperationsRestClient + from google.auth.aio.transport.sessions import AsyncAuthorizedSession + HAS_ASYNC_REST_EXTRA = True -except ImportError: # pragma: NO COVER +except ImportError: # pragma: NO 
COVER HAS_ASYNC_REST_EXTRA = False -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session from google.protobuf import json_format +from requests import PreparedRequest, Request, Response +from requests.sessions import Session try: from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER +except ImportError: # pragma: NO COVER HAS_GOOGLE_AUTH_AIO = False -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.location import locations_pb2 -from google.cloud.redis_v1.services.cloud_redis import CloudRedisAsyncClient -from google.cloud.redis_v1.services.cloud_redis import CloudRedisClient -from google.cloud.redis_v1.services.cloud_redis import pagers -from google.cloud.redis_v1.services.cloud_redis import transports -from google.cloud.redis_v1.types import cloud_redis -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account import google.api_core.operation_async as operation_async # type: ignore import google.auth import google.protobuf.duration_pb2 as duration_pb2 # type: ignore @@ -78,8 +61,30 @@ import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore import google.type.dayofweek_pb2 as dayofweek_pb2 # type: ignore import google.type.timeofday_pb2 as timeofday_pb2 # type: ignore - - +from google.api_core import ( + client_options, + future, + gapic_v1, + 
grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.cloud.redis_v1.services.cloud_redis import ( + CloudRedisAsyncClient, + CloudRedisClient, + pagers, + transports, +) +from google.cloud.redis_v1.types import cloud_redis +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account CRED_INFO_JSON = { "credential_source": "/path/to/file", @@ -94,9 +99,11 @@ async def mock_async_gen(data, chunk_size=1): chunk = data[i : i + chunk_size] yield chunk.encode("utf-8") + def client_cert_source_callback(): return b"cert bytes", b"key bytes" + # TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. # See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. def async_anonymous_credentials(): @@ -104,17 +111,27 @@ def async_anonymous_credentials(): return ga_credentials_async.AnonymousCredentials() return ga_credentials.AnonymousCredentials() + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + # If default endpoint template is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint template so the client can produce a different # mtls endpoint for endpoint testing purposes. 
def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) def test__get_default_mtls_endpoint(): @@ -126,12 +143,26 @@ def test__get_default_mtls_endpoint(): custom_endpoint = ".custom" assert CloudRedisClient._get_default_mtls_endpoint(None) is None - assert CloudRedisClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert CloudRedisClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert CloudRedisClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert CloudRedisClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert ( + CloudRedisClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + CloudRedisClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + CloudRedisClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CloudRedisClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) assert CloudRedisClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - assert CloudRedisClient._get_default_mtls_endpoint(custom_endpoint) == custom_endpoint + assert ( + CloudRedisClient._get_default_mtls_endpoint(custom_endpoint) == custom_endpoint + ) + def test__read_environment_variables(): assert CloudRedisClient._read_environment_variables() == (False, "auto", None) @@ -154,10 +185,10 @@ def test__read_environment_variables(): ) else: assert CloudRedisClient._read_environment_variables() == ( - False, - "auto", - None, - ) + False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): assert 
CloudRedisClient._read_environment_variables() == (False, "never", None) @@ -171,10 +202,17 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: CloudRedisClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert CloudRedisClient._read_environment_variables() == (False, "auto", "foo.com") + assert CloudRedisClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) def test_use_client_cert_effective(): @@ -183,7 +221,9 @@ def test_use_client_cert_effective(): # the google-auth library supports automatic mTLS and determines that a # client certificate should be used. if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=True): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=True + ): assert CloudRedisClient._use_client_cert_effective() is True # Test case 2: Test when `should_use_client_cert` returns False. @@ -191,7 +231,9 @@ def test_use_client_cert_effective(): # the google-auth library supports automatic mTLS and determines that a # client certificate should NOT be used. 
if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=False): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=False + ): assert CloudRedisClient._use_client_cert_effective() is False # Test case 3: Test when `should_use_client_cert` is unavailable and the @@ -203,7 +245,9 @@ def test_use_client_cert_effective(): # Test case 4: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"} + ): assert CloudRedisClient._use_client_cert_effective() is False # Test case 5: Test when `should_use_client_cert` is unavailable and the @@ -215,7 +259,9 @@ def test_use_client_cert_effective(): # Test case 6: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"} + ): assert CloudRedisClient._use_client_cert_effective() is False # Test case 7: Test when `should_use_client_cert` is unavailable and the @@ -227,7 +273,9 @@ def test_use_client_cert_effective(): # Test case 8: Test when `should_use_client_cert` is unavailable and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". 
if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"} + ): assert CloudRedisClient._use_client_cert_effective() is False # Test case 9: Test when `should_use_client_cert` is unavailable and the @@ -242,83 +290,167 @@ def test_use_client_cert_effective(): # The method should raise a ValueError as the environment variable must be either # "true" or "false". if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): with pytest.raises(ValueError): CloudRedisClient._use_client_cert_effective() # Test case 11: Test when `should_use_client_cert` is available and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. # The method should return False as the environment variable is set to an invalid value. - if hasattr(google.auth.transport.mtls, "should_use_client_cert"): - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): assert CloudRedisClient._use_client_cert_effective() is False # Test case 12: Test when `should_use_client_cert` is available and the # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also, # the GOOGLE_API_CONFIG environment variable is unset. 
- if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): assert CloudRedisClient._use_client_cert_effective() is False + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() assert CloudRedisClient._get_client_cert_source(None, False) is None - assert CloudRedisClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert CloudRedisClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + assert ( + CloudRedisClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + CloudRedisClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + CloudRedisClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + CloudRedisClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert CloudRedisClient._get_client_cert_source(None, True) is mock_default_cert_source - assert CloudRedisClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source -@mock.patch.object(CloudRedisClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisClient)) 
-@mock.patch.object(CloudRedisAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisAsyncClient)) +@mock.patch.object( + CloudRedisClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CloudRedisClient), +) +@mock.patch.object( + CloudRedisAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CloudRedisAsyncClient), +) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() default_universe = CloudRedisClient._DEFAULT_UNIVERSE - default_endpoint = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) - assert CloudRedisClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert CloudRedisClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == CloudRedisClient.DEFAULT_MTLS_ENDPOINT - assert CloudRedisClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert CloudRedisClient._get_api_endpoint(None, None, default_universe, "always") == CloudRedisClient.DEFAULT_MTLS_ENDPOINT - assert CloudRedisClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == CloudRedisClient.DEFAULT_MTLS_ENDPOINT - assert CloudRedisClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert CloudRedisClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + assert ( + CloudRedisClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + 
CloudRedisClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == CloudRedisClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + CloudRedisClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + CloudRedisClient._get_api_endpoint(None, None, default_universe, "always") + == CloudRedisClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + CloudRedisClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == CloudRedisClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + CloudRedisClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + CloudRedisClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) with pytest.raises(MutualTLSChannelError) as excinfo: - CloudRedisClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + CloudRedisClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) def test__get_universe_domain(): client_universe_domain = "foo.com" universe_domain_env = "bar.com" - assert CloudRedisClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert CloudRedisClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert CloudRedisClient._get_universe_domain(None, None) == CloudRedisClient._DEFAULT_UNIVERSE + assert ( + CloudRedisClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + CloudRedisClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + CloudRedisClient._get_universe_domain(None, None) + == CloudRedisClient._DEFAULT_UNIVERSE + ) with pytest.raises(ValueError) as excinfo: CloudRedisClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): cred = mock.Mock(["get_cred_info"]) cred.get_cred_info = mock.Mock(return_value=cred_info_json) @@ -334,7 +466,8 @@ def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_in else: assert error.details == ["foo"] -@pytest.mark.parametrize("error_code", [401,403,404,500]) + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) def 
test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): cred = mock.Mock([]) assert not hasattr(cred, "get_cred_info") @@ -347,14 +480,20 @@ def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): client._add_cred_info_for_auth_errors(error) assert error.details == [] -@pytest.mark.parametrize("client_class,transport_name", [ - (CloudRedisClient, "grpc"), - (CloudRedisAsyncClient, "grpc_asyncio"), - (CloudRedisClient, "rest"), -]) + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (CloudRedisClient, "grpc"), + (CloudRedisAsyncClient, "grpc_asyncio"), + (CloudRedisClient, "rest"), + ], +) def test_cloud_redis_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: factory.return_value = creds info = {"valid": True} client = client_class.from_service_account_info(info, transport=transport_name) @@ -362,52 +501,68 @@ def test_cloud_redis_client_from_service_account_info(client_class, transport_na assert isinstance(client, client_class) assert client.transport._host == ( - 'redis.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://redis.googleapis.com' + "redis.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://redis.googleapis.com" ) -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.CloudRedisGrpcTransport, "grpc"), - (transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.CloudRedisRestTransport, "rest"), -]) -def test_cloud_redis_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: +@pytest.mark.parametrize( + "transport_class,transport_name", + 
[ + (transports.CloudRedisGrpcTransport, "grpc"), + (transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.CloudRedisRestTransport, "rest"), + ], +) +def test_cloud_redis_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=False) use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class,transport_name", [ - (CloudRedisClient, "grpc"), - (CloudRedisAsyncClient, "grpc_asyncio"), - (CloudRedisClient, "rest"), -]) +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (CloudRedisClient, "grpc"), + (CloudRedisAsyncClient, "grpc_asyncio"), + (CloudRedisClient, "rest"), + ], +) def test_cloud_redis_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json", 
transport=transport_name) + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) assert client.transport._host == ( - 'redis.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://redis.googleapis.com' + "redis.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://redis.googleapis.com" ) @@ -423,30 +578,45 @@ def test_cloud_redis_client_get_transport_class(): assert transport == transports.CloudRedisGrpcTransport -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc"), - (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio"), - (CloudRedisClient, transports.CloudRedisRestTransport, "rest"), -]) -@mock.patch.object(CloudRedisClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisClient)) -@mock.patch.object(CloudRedisAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisAsyncClient)) -def test_cloud_redis_client_client_options(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc"), + ( + CloudRedisAsyncClient, + transports.CloudRedisGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (CloudRedisClient, transports.CloudRedisRestTransport, "rest"), + ], +) +@mock.patch.object( + CloudRedisClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CloudRedisClient), +) +@mock.patch.object( + CloudRedisAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CloudRedisAsyncClient), +) +def test_cloud_redis_client_client_options( + client_class, transport_class, transport_name +): # Check that if channel is provided we won't create a new 
one. - with mock.patch.object(CloudRedisClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) + with mock.patch.object(CloudRedisClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. - with mock.patch.object(CloudRedisClient, 'get_transport_class') as gtc: + with mock.patch.object(CloudRedisClient, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() # Check the case api_endpoint is provided. options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( @@ -464,13 +634,15 @@ def test_cloud_redis_client_client_options(client_class, transport_class, transp # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -482,7 +654,7 @@ def test_cloud_redis_client_client_options(client_class, transport_class, transp # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( @@ -502,17 +674,22 @@ def test_cloud_redis_client_client_options(client_class, transport_class, transp with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, 
transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -521,48 +698,82 @@ def test_cloud_redis_client_client_options(client_class, transport_class, transp api_audience=None, ) # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", "true"), - (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", "false"), - (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (CloudRedisClient, transports.CloudRedisRestTransport, "rest", "true"), - (CloudRedisClient, transports.CloudRedisRestTransport, "rest", "false"), -]) 
-@mock.patch.object(CloudRedisClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisClient)) -@mock.patch.object(CloudRedisAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisAsyncClient)) + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", "true"), + ( + CloudRedisAsyncClient, + transports.CloudRedisGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", "false"), + ( + CloudRedisAsyncClient, + transports.CloudRedisGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (CloudRedisClient, transports.CloudRedisRestTransport, "rest", "true"), + (CloudRedisClient, transports.CloudRedisRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + CloudRedisClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CloudRedisClient), +) +@mock.patch.object( + CloudRedisAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CloudRedisAsyncClient), +) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_cloud_redis_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): +def test_cloud_redis_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. # Check the case client_cert_source is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -581,12 +792,22 @@ def test_cloud_redis_client_mtls_env_auto(client_class, transport_class, transpo # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -607,15 +828,22 @@ def test_cloud_redis_client_mtls_env_auto(client_class, transport_class, transpo ) # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -625,19 +853,27 @@ def test_cloud_redis_client_mtls_env_auto(client_class, transport_class, transpo ) -@pytest.mark.parametrize("client_class", [ - CloudRedisClient, CloudRedisAsyncClient -]) -@mock.patch.object(CloudRedisClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisClient)) -@mock.patch.object(CloudRedisAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisAsyncClient)) +@pytest.mark.parametrize("client_class", [CloudRedisClient, CloudRedisAsyncClient]) +@mock.patch.object( + CloudRedisClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisClient) +) +@mock.patch.object( + CloudRedisAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudRedisAsyncClient), +) def test_cloud_redis_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source == mock_client_cert_source @@ -645,18 +881,25 @@ def test_cloud_redis_client_get_mtls_endpoint_and_cert_source(client_class): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) assert api_endpoint == mock_api_endpoint assert cert_source is None # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): if hasattr(google.auth.transport.mtls, "should_use_client_cert"): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, ) api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( options @@ -693,23 +936,23 @@ def test_cloud_redis_client_get_mtls_endpoint_and_cert_source(client_class): env = os.environ.copy() env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) with mock.patch.dict(os.environ, env, clear=True): - config_filename = "mock_certificate_config.json" - config_file_content = json.dumps(config_data) - m = mock.mock_open(read_data=config_file_content) - with mock.patch("builtins.open", m): - with mock.patch.dict( - os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} - ): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, - api_endpoint=mock_api_endpoint, - ) - api_endpoint, cert_source = ( - client_class.get_mtls_endpoint_and_cert_source(options) - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is expected_cert_source + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint 
== mock_api_endpoint + assert cert_source is expected_cert_source # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). test_cases = [ @@ -740,23 +983,23 @@ def test_cloud_redis_client_get_mtls_endpoint_and_cert_source(client_class): env = os.environ.copy() env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") with mock.patch.dict(os.environ, env, clear=True): - config_filename = "mock_certificate_config.json" - config_file_content = json.dumps(config_data) - m = mock.mock_open(read_data=config_file_content) - with mock.patch("builtins.open", m): - with mock.patch.dict( - os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} - ): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, - api_endpoint=mock_api_endpoint, - ) - api_endpoint, cert_source = ( - client_class.get_mtls_endpoint_and_cert_source(options) - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is expected_cert_source + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): @@ -772,16 +1015,27 @@ def test_cloud_redis_client_get_mtls_endpoint_and_cert_source(client_class): # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_ENDPOINT assert cert_source is None # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source() + ) assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source @@ -791,27 +1045,48 @@ def test_cloud_redis_client_get_mtls_endpoint_and_cert_source(client_class): with pytest.raises(MutualTLSChannelError) as excinfo: client_class.get_mtls_endpoint_and_cert_source() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + -@pytest.mark.parametrize("client_class", [ - CloudRedisClient, CloudRedisAsyncClient -]) -@mock.patch.object(CloudRedisClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisClient)) 
-@mock.patch.object(CloudRedisAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisAsyncClient)) +@pytest.mark.parametrize("client_class", [CloudRedisClient, CloudRedisAsyncClient]) +@mock.patch.object( + CloudRedisClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CloudRedisClient), +) +@mock.patch.object( + CloudRedisAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CloudRedisAsyncClient), +) def test_cloud_redis_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback api_override = "foo.com" default_universe = CloudRedisClient._DEFAULT_UNIVERSE - default_endpoint = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + default_endpoint = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) mock_universe = "bar.com" - mock_endpoint = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + mock_endpoint = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", @@ -834,11 +1109,19 @@ def test_cloud_redis_client_client_api_endpoint(client_class): universe_exists = hasattr(options, "universe_domain") if universe_exists: options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the _DEFAULT_ENDPOINT_TEMPLATE 
populated with GDU as the api endpoint. @@ -846,27 +1129,40 @@ def test_cloud_redis_client_client_api_endpoint(client_class): if hasattr(options, "universe_domain"): delattr(options, "universe_domain") with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) assert client.api_endpoint == default_endpoint -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc"), - (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio"), - (CloudRedisClient, transports.CloudRedisRestTransport, "rest"), -]) -def test_cloud_redis_client_client_options_scopes(client_class, transport_class, transport_name): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc"), + ( + CloudRedisAsyncClient, + transports.CloudRedisGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (CloudRedisClient, transports.CloudRedisRestTransport, "rest"), + ], +) +def test_cloud_redis_client_client_options_scopes( + client_class, transport_class, transport_name +): # Check the case scopes are provided. 
options = client_options.ClientOptions( scopes=["1", "2"], ) - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -875,24 +1171,35 @@ def test_cloud_redis_client_client_options_scopes(client_class, transport_class, api_audience=None, ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", grpc_helpers), - (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (CloudRedisClient, transports.CloudRedisRestTransport, "rest", None), -]) -def test_cloud_redis_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", grpc_helpers), + ( + CloudRedisAsyncClient, + transports.CloudRedisGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (CloudRedisClient, transports.CloudRedisRestTransport, "rest", None), + ], +) +def test_cloud_redis_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -901,12 +1208,13 @@ def test_cloud_redis_client_client_options_credentials_file(client_class, transp api_audience=None, ) + def test_cloud_redis_client_client_options_from_dict(): - with mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisGrpcTransport.__init__') as grpc_transport: + with mock.patch( + "google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisGrpcTransport.__init__" + ) as grpc_transport: grpc_transport.return_value = None - client = CloudRedisClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) + client = CloudRedisClient(client_options={"api_endpoint": "squid.clam.whelk"}) grpc_transport.assert_called_once_with( credentials=None, credentials_file=None, @@ -920,23 +1228,33 @@ def test_cloud_redis_client_client_options_from_dict(): ) -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", grpc_helpers), - (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_cloud_redis_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): +@pytest.mark.parametrize( + 
"client_class,transport_class,transport_name,grpc_helpers", + [ + (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", grpc_helpers), + ( + CloudRedisAsyncClient, + transports.CloudRedisGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_cloud_redis_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) + options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -946,13 +1264,13 @@ def test_cloud_redis_client_create_channel_credentials_file(client_class, transp ) # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch.object(grpc_helpers, "create_channel") as create_channel, + ): creds = ga_credentials.AnonymousCredentials() file_creds = ga_credentials.AnonymousCredentials() load_creds.return_value = (file_creds, None) @@ -963,9 +1281,7 @@ def test_cloud_redis_client_create_channel_credentials_file(client_class, transp credentials=file_creds, credentials_file=None, quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), scopes=None, default_host="redis.googleapis.com", ssl_credentials=None, @@ -976,11 +1292,14 @@ def test_cloud_redis_client_create_channel_credentials_file(client_class, transp ) -@pytest.mark.parametrize("request_type", [ - cloud_redis.ListInstancesRequest, - dict, -]) -def test_list_instances(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.ListInstancesRequest, + dict, + ], +) +def test_list_instances(request_type, transport: str = "grpc"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -991,13 +1310,11 @@ def test_list_instances(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = cloud_redis.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) response = client.list_instances(request) @@ -1009,8 +1326,8 @@ def test_list_instances(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] def test_list_instances_non_empty_request_with_auto_populated_field(): @@ -1018,30 +1335,31 @@ def test_list_instances_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = cloud_redis.ListInstancesRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_instances(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.ListInstancesRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) + def test_list_instances_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1060,7 +1378,9 @@ def test_list_instances_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc request = {} client.list_instances(request) @@ -1074,8 +1394,11 @@ def test_list_instances_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_instances_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_list_instances_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1089,12 +1412,17 @@ async def test_list_instances_async_use_cached_wrapped_rpc(transport: str = "grp wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.list_instances in client._client._transport._wrapped_methods + assert ( + client._client._transport.list_instances + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - 
client._client._transport._wrapped_methods[client._client._transport.list_instances] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.list_instances + ] = mock_rpc request = {} await client.list_instances(request) @@ -1108,8 +1436,11 @@ async def test_list_instances_async_use_cached_wrapped_rpc(transport: str = "grp assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_list_instances_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.ListInstancesRequest): +async def test_list_instances_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.ListInstancesRequest +): client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -1120,14 +1451,14 @@ async def test_list_instances_async(transport: str = 'grpc_asyncio', request_typ request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_redis.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) response = await client.list_instances(request) # Establish that the underlying gRPC stub method was called. @@ -1138,14 +1469,15 @@ async def test_list_instances_async(transport: str = 'grpc_asyncio', request_typ # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListInstancesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio async def test_list_instances_async_from_dict(): await test_list_instances_async(request_type=dict) + def test_list_instances_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1155,12 +1487,10 @@ def test_list_instances_field_headers(): # a field header. Set these to a non-empty value. request = cloud_redis.ListInstancesRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: call.return_value = cloud_redis.ListInstancesResponse() client.list_instances(request) @@ -1172,9 +1502,9 @@ def test_list_instances_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1187,13 +1517,13 @@ async def test_list_instances_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_redis.ListInstancesRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.ListInstancesResponse()) + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_redis.ListInstancesResponse() + ) await client.list_instances(request) # Establish that the underlying gRPC stub method was called. @@ -1204,9 +1534,9 @@ async def test_list_instances_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_list_instances_flattened(): @@ -1215,15 +1545,13 @@ def test_list_instances_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_redis.ListInstancesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.list_instances( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1231,7 +1559,7 @@ def test_list_instances_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -1245,9 +1573,10 @@ def test_list_instances_flattened_error(): with pytest.raises(ValueError): client.list_instances( cloud_redis.ListInstancesRequest(), - parent='parent_value', + parent="parent_value", ) + @pytest.mark.asyncio async def test_list_instances_flattened_async(): client = CloudRedisAsyncClient( @@ -1255,17 +1584,17 @@ async def test_list_instances_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_redis.ListInstancesResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.ListInstancesResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_redis.ListInstancesResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.list_instances( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1273,9 +1602,10 @@ async def test_list_instances_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val + @pytest.mark.asyncio async def test_list_instances_flattened_error_async(): client = CloudRedisAsyncClient( @@ -1287,7 +1617,7 @@ async def test_list_instances_flattened_error_async(): with pytest.raises(ValueError): await client.list_instances( cloud_redis.ListInstancesRequest(), - parent='parent_value', + parent="parent_value", ) @@ -1298,9 +1628,7 @@ def test_list_instances_pager(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( cloud_redis.ListInstancesResponse( @@ -1309,17 +1637,17 @@ def test_list_instances_pager(transport_name: str = "grpc"): cloud_redis.Instance(), cloud_redis.Instance(), ], - next_page_token='abc', + next_page_token="abc", ), cloud_redis.ListInstancesResponse( instances=[], - next_page_token='def', + next_page_token="def", ), cloud_redis.ListInstancesResponse( instances=[ cloud_redis.Instance(), ], - next_page_token='ghi', + next_page_token="ghi", ), cloud_redis.ListInstancesResponse( instances=[ @@ -1334,9 +1662,7 @@ def test_list_instances_pager(transport_name: str = "grpc"): retry = retries.Retry() timeout = 5 expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_instances(request={}, retry=retry, timeout=timeout) @@ -1346,8 +1672,9 @@ def test_list_instances_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, cloud_redis.Instance) - for i in results) + assert all(isinstance(i, cloud_redis.Instance) for i in results) + + def test_list_instances_pages(transport_name: str = "grpc"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1355,9 +1682,7 @@ def test_list_instances_pages(transport_name: str = "grpc"): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( cloud_redis.ListInstancesResponse( @@ -1366,17 +1691,17 @@ def test_list_instances_pages(transport_name: str = "grpc"): cloud_redis.Instance(), cloud_redis.Instance(), ], - next_page_token='abc', + next_page_token="abc", ), cloud_redis.ListInstancesResponse( instances=[], - next_page_token='def', + next_page_token="def", ), cloud_redis.ListInstancesResponse( instances=[ cloud_redis.Instance(), ], - next_page_token='ghi', + next_page_token="ghi", ), cloud_redis.ListInstancesResponse( instances=[ @@ -1387,9 +1712,10 @@ def test_list_instances_pages(transport_name: str = "grpc"): RuntimeError, ) pages = list(client.list_instances(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token + @pytest.mark.asyncio async def test_list_instances_async_pager(): client = CloudRedisAsyncClient( @@ -1398,8 +1724,8 @@ async def test_list_instances_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_instances), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( cloud_redis.ListInstancesResponse( @@ -1408,17 +1734,17 @@ async def test_list_instances_async_pager(): cloud_redis.Instance(), cloud_redis.Instance(), ], - next_page_token='abc', + next_page_token="abc", ), cloud_redis.ListInstancesResponse( instances=[], - next_page_token='def', + next_page_token="def", ), cloud_redis.ListInstancesResponse( instances=[ cloud_redis.Instance(), ], - next_page_token='ghi', + next_page_token="ghi", ), cloud_redis.ListInstancesResponse( instances=[ @@ -1428,15 +1754,16 @@ async def test_list_instances_async_pager(): ), RuntimeError, ) - async_pager = await client.list_instances(request={},) - assert async_pager.next_page_token == 'abc' + async_pager = await client.list_instances( + request={}, + ) + assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: # pragma: no branch + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, cloud_redis.Instance) - for i in responses) + assert all(isinstance(i, cloud_redis.Instance) for i in responses) @pytest.mark.asyncio @@ -1447,8 +1774,8 @@ async def test_list_instances_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_instances), - '__call__', new_callable=mock.AsyncMock) as call: + type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock + ) as call: # Set the response to a series of pages. 
call.side_effect = ( cloud_redis.ListInstancesResponse( @@ -1457,17 +1784,17 @@ async def test_list_instances_async_pages(): cloud_redis.Instance(), cloud_redis.Instance(), ], - next_page_token='abc', + next_page_token="abc", ), cloud_redis.ListInstancesResponse( instances=[], - next_page_token='def', + next_page_token="def", ), cloud_redis.ListInstancesResponse( instances=[ cloud_redis.Instance(), ], - next_page_token='ghi', + next_page_token="ghi", ), cloud_redis.ListInstancesResponse( instances=[ @@ -1480,18 +1807,22 @@ async def test_list_instances_async_pages(): pages = [] # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch + async for page_ in ( # pragma: no branch await client.list_instances(request={}) ).pages: pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - cloud_redis.GetInstanceRequest, - dict, -]) -def test_get_instance(request_type, transport: str = 'grpc'): + +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.GetInstanceRequest, + dict, + ], +) +def test_get_instance(request_type, transport: str = "grpc"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1502,38 +1833,38 @@ def test_get_instance(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = cloud_redis.Instance( - name='name_value', - display_name='display_name_value', - location_id='location_id_value', - alternative_location_id='alternative_location_id_value', - redis_version='redis_version_value', - reserved_ip_range='reserved_ip_range_value', - secondary_ip_range='secondary_ip_range_value', - host='host_value', + name="name_value", + display_name="display_name_value", + location_id="location_id_value", + alternative_location_id="alternative_location_id_value", + redis_version="redis_version_value", + reserved_ip_range="reserved_ip_range_value", + secondary_ip_range="secondary_ip_range_value", + host="host_value", port=453, - current_location_id='current_location_id_value', + current_location_id="current_location_id_value", state=cloud_redis.Instance.State.CREATING, - status_message='status_message_value', + status_message="status_message_value", tier=cloud_redis.Instance.Tier.BASIC, memory_size_gb=1499, - authorized_network='authorized_network_value', - persistence_iam_identity='persistence_iam_identity_value', + authorized_network="authorized_network_value", + persistence_iam_identity="persistence_iam_identity_value", connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, auth_enabled=True, transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, replica_count=1384, - read_endpoint='read_endpoint_value', + read_endpoint="read_endpoint_value", read_endpoint_port=1920, read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, - customer_managed_key='customer_managed_key_value', - suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], - maintenance_version='maintenance_version_value', - available_maintenance_versions=['available_maintenance_versions_value'], + customer_managed_key="customer_managed_key_value", + suspension_reasons=[ + cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE + ], + 
maintenance_version="maintenance_version_value", + available_maintenance_versions=["available_maintenance_versions_value"], ) response = client.get_instance(request) @@ -1545,33 +1876,43 @@ def test_get_instance(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, cloud_redis.Instance) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.location_id == 'location_id_value' - assert response.alternative_location_id == 'alternative_location_id_value' - assert response.redis_version == 'redis_version_value' - assert response.reserved_ip_range == 'reserved_ip_range_value' - assert response.secondary_ip_range == 'secondary_ip_range_value' - assert response.host == 'host_value' + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.location_id == "location_id_value" + assert response.alternative_location_id == "alternative_location_id_value" + assert response.redis_version == "redis_version_value" + assert response.reserved_ip_range == "reserved_ip_range_value" + assert response.secondary_ip_range == "secondary_ip_range_value" + assert response.host == "host_value" assert response.port == 453 - assert response.current_location_id == 'current_location_id_value' + assert response.current_location_id == "current_location_id_value" assert response.state == cloud_redis.Instance.State.CREATING - assert response.status_message == 'status_message_value' + assert response.status_message == "status_message_value" assert response.tier == cloud_redis.Instance.Tier.BASIC assert response.memory_size_gb == 1499 - assert response.authorized_network == 'authorized_network_value' - assert response.persistence_iam_identity == 'persistence_iam_identity_value' + assert response.authorized_network == "authorized_network_value" + assert response.persistence_iam_identity == "persistence_iam_identity_value" 
assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING assert response.auth_enabled is True - assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + assert ( + response.transit_encryption_mode + == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + ) assert response.replica_count == 1384 - assert response.read_endpoint == 'read_endpoint_value' + assert response.read_endpoint == "read_endpoint_value" assert response.read_endpoint_port == 1920 - assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED - assert response.customer_managed_key == 'customer_managed_key_value' - assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE] - assert response.maintenance_version == 'maintenance_version_value' - assert response.available_maintenance_versions == ['available_maintenance_versions_value'] + assert ( + response.read_replicas_mode + == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED + ) + assert response.customer_managed_key == "customer_managed_key_value" + assert response.suspension_reasons == [ + cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE + ] + assert response.maintenance_version == "maintenance_version_value" + assert response.available_maintenance_versions == [ + "available_maintenance_versions_value" + ] def test_get_instance_non_empty_request_with_auto_populated_field(): @@ -1579,28 +1920,29 @@ def test_get_instance_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
request = cloud_redis.GetInstanceRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_instance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.GetInstanceRequest( - name='name_value', + name="name_value", ) + def test_get_instance_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1619,7 +1961,9 @@ def test_get_instance_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc request = {} client.get_instance(request) @@ -1633,8 +1977,11 @@ def test_get_instance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_get_instance_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -1648,12 +1995,17 @@ async def test_get_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_ wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.get_instance in client._client._transport._wrapped_methods + assert ( + client._client._transport.get_instance + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_instance] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.get_instance + ] = mock_rpc request = {} await client.get_instance(request) @@ -1667,51 +2019,56 @@ async def test_get_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_ assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.GetInstanceRequest): +async def test_get_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.GetInstanceRequest +): client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # 
and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.Instance( - name='name_value', - display_name='display_name_value', - location_id='location_id_value', - alternative_location_id='alternative_location_id_value', - redis_version='redis_version_value', - reserved_ip_range='reserved_ip_range_value', - secondary_ip_range='secondary_ip_range_value', - host='host_value', - port=453, - current_location_id='current_location_id_value', - state=cloud_redis.Instance.State.CREATING, - status_message='status_message_value', - tier=cloud_redis.Instance.Tier.BASIC, - memory_size_gb=1499, - authorized_network='authorized_network_value', - persistence_iam_identity='persistence_iam_identity_value', - connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, - auth_enabled=True, - transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, - replica_count=1384, - read_endpoint='read_endpoint_value', - read_endpoint_port=1920, - read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, - customer_managed_key='customer_managed_key_value', - suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], - maintenance_version='maintenance_version_value', - available_maintenance_versions=['available_maintenance_versions_value'], - )) + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_redis.Instance( + name="name_value", + display_name="display_name_value", + location_id="location_id_value", + alternative_location_id="alternative_location_id_value", + redis_version="redis_version_value", + reserved_ip_range="reserved_ip_range_value", + secondary_ip_range="secondary_ip_range_value", + host="host_value", + port=453, + current_location_id="current_location_id_value", + state=cloud_redis.Instance.State.CREATING, + status_message="status_message_value", + tier=cloud_redis.Instance.Tier.BASIC, + memory_size_gb=1499, + authorized_network="authorized_network_value", + persistence_iam_identity="persistence_iam_identity_value", + connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, + auth_enabled=True, + transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, + replica_count=1384, + read_endpoint="read_endpoint_value", + read_endpoint_port=1920, + read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, + customer_managed_key="customer_managed_key_value", + suspension_reasons=[ + cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE + ], + maintenance_version="maintenance_version_value", + available_maintenance_versions=["available_maintenance_versions_value"], + ) + ) response = await client.get_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1722,39 +2079,50 @@ async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type= # Establish that the response is the type that we expect. 
assert isinstance(response, cloud_redis.Instance) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.location_id == 'location_id_value' - assert response.alternative_location_id == 'alternative_location_id_value' - assert response.redis_version == 'redis_version_value' - assert response.reserved_ip_range == 'reserved_ip_range_value' - assert response.secondary_ip_range == 'secondary_ip_range_value' - assert response.host == 'host_value' + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.location_id == "location_id_value" + assert response.alternative_location_id == "alternative_location_id_value" + assert response.redis_version == "redis_version_value" + assert response.reserved_ip_range == "reserved_ip_range_value" + assert response.secondary_ip_range == "secondary_ip_range_value" + assert response.host == "host_value" assert response.port == 453 - assert response.current_location_id == 'current_location_id_value' + assert response.current_location_id == "current_location_id_value" assert response.state == cloud_redis.Instance.State.CREATING - assert response.status_message == 'status_message_value' + assert response.status_message == "status_message_value" assert response.tier == cloud_redis.Instance.Tier.BASIC assert response.memory_size_gb == 1499 - assert response.authorized_network == 'authorized_network_value' - assert response.persistence_iam_identity == 'persistence_iam_identity_value' + assert response.authorized_network == "authorized_network_value" + assert response.persistence_iam_identity == "persistence_iam_identity_value" assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING assert response.auth_enabled is True - assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + assert ( + response.transit_encryption_mode + == 
cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + ) assert response.replica_count == 1384 - assert response.read_endpoint == 'read_endpoint_value' + assert response.read_endpoint == "read_endpoint_value" assert response.read_endpoint_port == 1920 - assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED - assert response.customer_managed_key == 'customer_managed_key_value' - assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE] - assert response.maintenance_version == 'maintenance_version_value' - assert response.available_maintenance_versions == ['available_maintenance_versions_value'] + assert ( + response.read_replicas_mode + == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED + ) + assert response.customer_managed_key == "customer_managed_key_value" + assert response.suspension_reasons == [ + cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE + ] + assert response.maintenance_version == "maintenance_version_value" + assert response.available_maintenance_versions == [ + "available_maintenance_versions_value" + ] @pytest.mark.asyncio async def test_get_instance_async_from_dict(): await test_get_instance_async(request_type=dict) + def test_get_instance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1764,12 +2132,10 @@ def test_get_instance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_redis.GetInstanceRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: call.return_value = cloud_redis.Instance() client.get_instance(request) @@ -1781,9 +2147,9 @@ def test_get_instance_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1796,13 +2162,13 @@ async def test_get_instance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_redis.GetInstanceRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.Instance()) + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_redis.Instance() + ) await client.get_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1813,9 +2179,9 @@ async def test_get_instance_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_get_instance_flattened(): @@ -1824,15 +2190,13 @@ def test_get_instance_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = cloud_redis.Instance() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_instance( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -1840,7 +2204,7 @@ def test_get_instance_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -1854,9 +2218,10 @@ def test_get_instance_flattened_error(): with pytest.raises(ValueError): client.get_instance( cloud_redis.GetInstanceRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_get_instance_flattened_async(): client = CloudRedisAsyncClient( @@ -1864,17 +2229,17 @@ async def test_get_instance_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_redis.Instance() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.Instance()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_redis.Instance() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.get_instance( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -1882,9 +2247,10 @@ async def test_get_instance_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_get_instance_flattened_error_async(): client = CloudRedisAsyncClient( @@ -1896,15 +2262,18 @@ async def test_get_instance_flattened_error_async(): with pytest.raises(ValueError): await client.get_instance( cloud_redis.GetInstanceRequest(), - name='name_value', + name="name_value", ) -@pytest.mark.parametrize("request_type", [ - cloud_redis.CreateInstanceRequest, - dict, -]) -def test_create_instance(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.CreateInstanceRequest, + dict, + ], +) +def test_create_instance(request_type, transport: str = "grpc"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1915,11 +2284,9 @@ def test_create_instance(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.create_instance(request) # Establish that the underlying gRPC stub method was called. @@ -1937,30 +2304,31 @@ def test_create_instance_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. 
client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = cloud_redis.CreateInstanceRequest( - parent='parent_value', - instance_id='instance_id_value', + parent="parent_value", + instance_id="instance_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_instance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.CreateInstanceRequest( - parent='parent_value', - instance_id='instance_id_value', + parent="parent_value", + instance_id="instance_id_value", ) + def test_create_instance_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1979,7 +2347,9 @@ def test_create_instance_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc request = {} client.create_instance(request) @@ -1998,8 +2368,11 @@ def test_create_instance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_create_instance_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2013,12 +2386,17 @@ async def test_create_instance_async_use_cached_wrapped_rpc(transport: str = "gr wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.create_instance in client._client._transport._wrapped_methods + assert ( + client._client._transport.create_instance + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_instance] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.create_instance + ] = mock_rpc request = {} await client.create_instance(request) @@ -2037,8 +2415,11 @@ async def test_create_instance_async_use_cached_wrapped_rpc(transport: str = "gr assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_create_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.CreateInstanceRequest): +async def test_create_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.CreateInstanceRequest +): client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2049,12 +2430,10 @@ async def 
test_create_instance_async(transport: str = 'grpc_asyncio', request_ty request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.create_instance(request) @@ -2072,6 +2451,7 @@ async def test_create_instance_async(transport: str = 'grpc_asyncio', request_ty async def test_create_instance_async_from_dict(): await test_create_instance_async(request_type=dict) + def test_create_instance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2081,13 +2461,11 @@ def test_create_instance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_redis.CreateInstanceRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.create_instance(request) # Establish that the underlying gRPC stub method was called. @@ -2098,9 +2476,9 @@ def test_create_instance_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2113,13 +2491,13 @@ async def test_create_instance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_redis.CreateInstanceRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.create_instance(request) # Establish that the underlying gRPC stub method was called. @@ -2130,9 +2508,9 @@ async def test_create_instance_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_create_instance_flattened(): @@ -2141,17 +2519,15 @@ def test_create_instance_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.create_instance( - parent='parent_value', - instance_id='instance_id_value', - instance=cloud_redis.Instance(name='name_value'), + parent="parent_value", + instance_id="instance_id_value", + instance=cloud_redis.Instance(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -2159,13 +2535,13 @@ def test_create_instance_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].instance_id - mock_val = 'instance_id_value' + mock_val = "instance_id_value" assert arg == mock_val arg = args[0].instance - mock_val = cloud_redis.Instance(name='name_value') + mock_val = cloud_redis.Instance(name="name_value") assert arg == mock_val @@ -2179,11 +2555,12 @@ def test_create_instance_flattened_error(): with pytest.raises(ValueError): client.create_instance( cloud_redis.CreateInstanceRequest(), - parent='parent_value', - instance_id='instance_id_value', - instance=cloud_redis.Instance(name='name_value'), + parent="parent_value", + instance_id="instance_id_value", + instance=cloud_redis.Instance(name="name_value"), ) + @pytest.mark.asyncio async def test_create_instance_flattened_async(): client = CloudRedisAsyncClient( @@ -2191,21 +2568,19 @@ async def test_create_instance_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_instance( - parent='parent_value', - instance_id='instance_id_value', - instance=cloud_redis.Instance(name='name_value'), + parent="parent_value", + instance_id="instance_id_value", + instance=cloud_redis.Instance(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -2213,15 +2588,16 @@ async def test_create_instance_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].instance_id - mock_val = 'instance_id_value' + mock_val = "instance_id_value" assert arg == mock_val arg = args[0].instance - mock_val = cloud_redis.Instance(name='name_value') + mock_val = cloud_redis.Instance(name="name_value") assert arg == mock_val + @pytest.mark.asyncio async def test_create_instance_flattened_error_async(): client = CloudRedisAsyncClient( @@ -2233,17 +2609,20 @@ async def test_create_instance_flattened_error_async(): with pytest.raises(ValueError): await client.create_instance( cloud_redis.CreateInstanceRequest(), - parent='parent_value', - instance_id='instance_id_value', - instance=cloud_redis.Instance(name='name_value'), + parent="parent_value", + instance_id="instance_id_value", + instance=cloud_redis.Instance(name="name_value"), ) -@pytest.mark.parametrize("request_type", [ - cloud_redis.UpdateInstanceRequest, - dict, -]) -def test_update_instance(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + 
cloud_redis.UpdateInstanceRequest, + dict, + ], +) +def test_update_instance(request_type, transport: str = "grpc"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2254,11 +2633,9 @@ def test_update_instance(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.update_instance(request) # Establish that the underlying gRPC stub method was called. @@ -2276,25 +2653,24 @@ def test_update_instance_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = cloud_redis.UpdateInstanceRequest( - ) + request = cloud_redis.UpdateInstanceRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.update_instance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.UpdateInstanceRequest( - ) + assert args[0] == cloud_redis.UpdateInstanceRequest() + def test_update_instance_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2314,7 +2690,9 @@ def test_update_instance_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc request = {} client.update_instance(request) @@ -2333,8 +2711,11 @@ def test_update_instance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_update_instance_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2348,12 +2729,17 @@ async def test_update_instance_async_use_cached_wrapped_rpc(transport: str = "gr wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.update_instance in client._client._transport._wrapped_methods + assert ( + client._client._transport.update_instance + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_instance] = mock_rpc + 
client._client._transport._wrapped_methods[ + client._client._transport.update_instance + ] = mock_rpc request = {} await client.update_instance(request) @@ -2372,8 +2758,11 @@ async def test_update_instance_async_use_cached_wrapped_rpc(transport: str = "gr assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_update_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.UpdateInstanceRequest): +async def test_update_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.UpdateInstanceRequest +): client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2384,12 +2773,10 @@ async def test_update_instance_async(transport: str = 'grpc_asyncio', request_ty request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.update_instance(request) @@ -2407,6 +2794,7 @@ async def test_update_instance_async(transport: str = 'grpc_asyncio', request_ty async def test_update_instance_async_from_dict(): await test_update_instance_async(request_type=dict) + def test_update_instance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2416,13 +2804,11 @@ def test_update_instance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_redis.UpdateInstanceRequest() - request.instance.name = 'name_value' + request.instance.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.update_instance(request) # Establish that the underlying gRPC stub method was called. @@ -2433,9 +2819,9 @@ def test_update_instance_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'instance.name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "instance.name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2448,13 +2834,13 @@ async def test_update_instance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_redis.UpdateInstanceRequest() - request.instance.name = 'name_value' + request.instance.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.update_instance(request) # Establish that the underlying gRPC stub method was called. @@ -2465,9 +2851,9 @@ async def test_update_instance_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'instance.name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "instance.name=name_value", + ) in kw["metadata"] def test_update_instance_flattened(): @@ -2476,16 +2862,14 @@ def test_update_instance_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_instance( - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - instance=cloud_redis.Instance(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + instance=cloud_redis.Instance(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -2493,10 +2877,10 @@ def test_update_instance_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val arg = args[0].instance - mock_val = cloud_redis.Instance(name='name_value') + mock_val = cloud_redis.Instance(name="name_value") assert arg == mock_val @@ -2510,10 +2894,11 @@ def test_update_instance_flattened_error(): with pytest.raises(ValueError): client.update_instance( cloud_redis.UpdateInstanceRequest(), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - instance=cloud_redis.Instance(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + 
instance=cloud_redis.Instance(name="name_value"), ) + @pytest.mark.asyncio async def test_update_instance_flattened_async(): client = CloudRedisAsyncClient( @@ -2521,20 +2906,18 @@ async def test_update_instance_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_instance( - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - instance=cloud_redis.Instance(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + instance=cloud_redis.Instance(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -2542,12 +2925,13 @@ async def test_update_instance_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val arg = args[0].instance - mock_val = cloud_redis.Instance(name='name_value') + mock_val = cloud_redis.Instance(name="name_value") assert arg == mock_val + @pytest.mark.asyncio async def test_update_instance_flattened_error_async(): client = CloudRedisAsyncClient( @@ -2559,16 +2943,19 @@ async def test_update_instance_flattened_error_async(): with pytest.raises(ValueError): await client.update_instance( 
cloud_redis.UpdateInstanceRequest(), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - instance=cloud_redis.Instance(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + instance=cloud_redis.Instance(name="name_value"), ) -@pytest.mark.parametrize("request_type", [ - cloud_redis.DeleteInstanceRequest, - dict, -]) -def test_delete_instance(request_type, transport: str = 'grpc'): +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.DeleteInstanceRequest, + dict, + ], +) +def test_delete_instance(request_type, transport: str = "grpc"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2579,11 +2966,9 @@ def test_delete_instance(request_type, transport: str = 'grpc'): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') + call.return_value = operations_pb2.Operation(name="operations/spam") response = client.delete_instance(request) # Establish that the underlying gRPC stub method was called. @@ -2601,28 +2986,29 @@ def test_delete_instance_non_empty_request_with_auto_populated_field(): # automatically populated, according to AIP-4235, with non-empty requests. client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = cloud_redis.DeleteInstanceRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_instance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.DeleteInstanceRequest( - name='name_value', + name="name_value", ) + def test_delete_instance_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2641,7 +3027,9 @@ def test_delete_instance_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.delete_instance] = mock_rpc request = {} client.delete_instance(request) @@ -2660,8 +3048,11 @@ def test_delete_instance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_delete_instance_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -2675,12 +3066,17 @@ async def test_delete_instance_async_use_cached_wrapped_rpc(transport: str = "gr wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport.delete_instance in client._client._transport._wrapped_methods + assert ( + client._client._transport.delete_instance + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_instance] = mock_rpc + client._client._transport._wrapped_methods[ + client._client._transport.delete_instance + ] = mock_rpc request = {} await client.delete_instance(request) @@ -2699,8 +3095,11 @@ async def test_delete_instance_async_use_cached_wrapped_rpc(transport: str = "gr assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 + @pytest.mark.asyncio -async def test_delete_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.DeleteInstanceRequest): +async def test_delete_instance_async( + transport: str = "grpc_asyncio", request_type=cloud_redis.DeleteInstanceRequest +): client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport=transport, @@ -2711,12 +3110,10 @@ async def 
test_delete_instance_async(transport: str = 'grpc_asyncio', request_ty request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) response = await client.delete_instance(request) @@ -2734,6 +3131,7 @@ async def test_delete_instance_async(transport: str = 'grpc_asyncio', request_ty async def test_delete_instance_async_from_dict(): await test_delete_instance_async(request_type=dict) + def test_delete_instance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2743,13 +3141,11 @@ def test_delete_instance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_redis.DeleteInstanceRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.delete_instance(request) # Establish that the underlying gRPC stub method was called. @@ -2760,9 +3156,9 @@ def test_delete_instance_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2775,13 +3171,13 @@ async def test_delete_instance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_redis.DeleteInstanceRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. @@ -2792,9 +3188,9 @@ async def test_delete_instance_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_delete_instance_flattened(): @@ -2803,15 +3199,13 @@ def test_delete_instance_flattened(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.delete_instance( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -2819,7 +3213,7 @@ def test_delete_instance_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -2833,9 +3227,10 @@ def test_delete_instance_flattened_error(): with pytest.raises(ValueError): client.delete_instance( cloud_redis.DeleteInstanceRequest(), - name='name_value', + name="name_value", ) + @pytest.mark.asyncio async def test_delete_instance_flattened_async(): client = CloudRedisAsyncClient( @@ -2843,19 +3238,17 @@ async def test_delete_instance_flattened_async(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.delete_instance( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -2863,9 +3256,10 @@ async def test_delete_instance_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val + @pytest.mark.asyncio async def test_delete_instance_flattened_error_async(): client = CloudRedisAsyncClient( @@ -2877,7 +3271,7 @@ async def test_delete_instance_flattened_error_async(): with pytest.raises(ValueError): await client.delete_instance( cloud_redis.DeleteInstanceRequest(), - name='name_value', + name="name_value", ) @@ -2899,7 +3293,9 @@ def test_list_instances_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc request = {} @@ -2915,57 +3311,67 @@ def test_list_instances_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_instances_rest_required_fields(request_type=cloud_redis.ListInstancesRequest): +def test_list_instances_rest_required_fields( + request_type=cloud_redis.ListInstancesRequest, +): transport_class = transports.CloudRedisRestTransport request_init = {} request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instances._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_instances._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("page_size", "page_token", )) + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = cloud_redis.ListInstancesResponse() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -2976,23 +3382,32 @@ def test_list_instances_rest_required_fields(request_type=cloud_redis.ListInstan return_value = cloud_redis.ListInstancesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_instances(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_list_instances_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.list_instances._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) def test_list_instances_rest_flattened(): @@ -3002,16 +3417,16 @@ def test_list_instances_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = cloud_redis.ListInstancesResponse() # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -3021,7 +3436,7 @@ def test_list_instances_rest_flattened(): # Convert return value to protobuf type return_value = cloud_redis.ListInstancesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -3031,10 +3446,13 @@ def test_list_instances_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/instances" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/instances" % client.transport._host, + args[1], + ) -def test_list_instances_rest_flattened_error(transport: str = 'rest'): +def test_list_instances_rest_flattened_error(transport: str = "rest"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3045,20 +3463,20 @@ def test_list_instances_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_instances( cloud_redis.ListInstancesRequest(), - parent='parent_value', + parent="parent_value", ) -def test_list_instances_rest_pager(transport: str = 'rest'): +def test_list_instances_rest_pager(transport: str = "rest"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the 
method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( cloud_redis.ListInstancesResponse( @@ -3067,17 +3485,17 @@ def test_list_instances_rest_pager(transport: str = 'rest'): cloud_redis.Instance(), cloud_redis.Instance(), ], - next_page_token='abc', + next_page_token="abc", ), cloud_redis.ListInstancesResponse( instances=[], - next_page_token='def', + next_page_token="def", ), cloud_redis.ListInstancesResponse( instances=[ cloud_redis.Instance(), ], - next_page_token='ghi', + next_page_token="ghi", ), cloud_redis.ListInstancesResponse( instances=[ @@ -3093,21 +3511,20 @@ def test_list_instances_rest_pager(transport: str = 'rest'): response = tuple(cloud_redis.ListInstancesResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') + return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} pager = client.list_instances(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, cloud_redis.Instance) - for i in results) + assert all(isinstance(i, cloud_redis.Instance) for i in results) pages = list(client.list_instances(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -3129,7 +3546,9 @@ def 
test_get_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc request = {} @@ -3152,48 +3571,51 @@ def test_get_instance_rest_required_fields(request_type=cloud_redis.GetInstanceR request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - 
transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. return_value = cloud_redis.Instance() # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -3204,23 +3626,24 @@ def test_get_instance_rest_required_fields(request_type=cloud_redis.GetInstanceR return_value = cloud_redis.Instance.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_instance(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_get_instance_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = 
transport.get_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_get_instance_rest_flattened(): @@ -3230,16 +3653,18 @@ def test_get_instance_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = cloud_redis.Instance() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -3249,7 +3674,7 @@ def test_get_instance_rest_flattened(): # Convert return value to protobuf type return_value = cloud_redis.Instance.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -3259,10 +3684,13 @@ def test_get_instance_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, + args[1], + ) -def test_get_instance_rest_flattened_error(transport: str = 'rest'): +def test_get_instance_rest_flattened_error(transport: str = "rest"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3273,7 +3701,7 @@ def test_get_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.get_instance( cloud_redis.GetInstanceRequest(), - name='name_value', + name="name_value", ) @@ -3295,7 +3723,9 @@ def test_create_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc request = {} @@ -3315,7 +3745,9 @@ def test_create_instance_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_create_instance_rest_required_fields(request_type=cloud_redis.CreateInstanceRequest): +def test_create_instance_rest_required_fields( + request_type=cloud_redis.CreateInstanceRequest, +): transport_class = transports.CloudRedisRestTransport request_init = {} @@ -3323,65 +3755,68 @@ def test_create_instance_rest_required_fields(request_type=cloud_redis.CreateIns request_init["instance_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped assert "instanceId" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present assert "instanceId" in jsonified_request assert jsonified_request["instanceId"] == request_init["instance_id"] - jsonified_request["parent"] = 'parent_value' - jsonified_request["instanceId"] = 'instance_id_value' + jsonified_request["parent"] = "parent_value" + jsonified_request["instanceId"] = "instance_id_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + 
).create_instance._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("instance_id", )) + assert not set(unset_fields) - set(("instance_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" assert "instanceId" in jsonified_request - assert jsonified_request["instanceId"] == 'instance_id_value' + assert jsonified_request["instanceId"] == "instance_id_value" client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -3393,15 +3828,26 @@ def test_create_instance_rest_required_fields(request_type=cloud_redis.CreateIns "", ), ] - actual_params = req.call_args.kwargs['params'] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_create_instance_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.create_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(("instanceId", )) & set(("parent", "instanceId", "instance", ))) + assert set(unset_fields) == ( + set(("instanceId",)) + & set( + ( + "parent", + "instanceId", + "instance", + ) + ) + ) def test_create_instance_rest_flattened(): @@ -3411,18 +3857,18 @@ def test_create_instance_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', - instance_id='instance_id_value', - instance=cloud_redis.Instance(name='name_value'), + parent="parent_value", + instance_id="instance_id_value", + instance=cloud_redis.Instance(name="name_value"), ) mock_args.update(sample_request) @@ -3430,7 +3876,7 @@ def test_create_instance_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -3440,10 +3886,13 @@ def test_create_instance_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/instances" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/instances" % client.transport._host, + args[1], + ) -def test_create_instance_rest_flattened_error(transport: str = 'rest'): +def test_create_instance_rest_flattened_error(transport: str = "rest"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3454,9 +3903,9 @@ def test_create_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.create_instance( cloud_redis.CreateInstanceRequest(), - parent='parent_value', - instance_id='instance_id_value', - instance=cloud_redis.Instance(name='name_value'), + parent="parent_value", + instance_id="instance_id_value", + instance=cloud_redis.Instance(name="name_value"), ) @@ -3478,7 +3927,9 @@ def test_update_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc request = {} @@ -3498,77 +3949,91 @@ def test_update_instance_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_update_instance_rest_required_fields(request_type=cloud_redis.UpdateInstanceRequest): +def test_update_instance_rest_required_fields( + request_type=cloud_redis.UpdateInstanceRequest, +): transport_class = transports.CloudRedisRestTransport request_init = {} request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_instance._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask", )) + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, } - transcode_result['body'] = pb_request + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_instance(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_update_instance_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) unset_fields = transport.update_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("updateMask", "instance", ))) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "updateMask", + "instance", + ) + ) + ) def test_update_instance_rest_flattened(): @@ -3578,17 +4043,19 @@ def test_update_instance_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + sample_request = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } # get truthy value for each flattened field mock_args = dict( - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - instance=cloud_redis.Instance(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + instance=cloud_redis.Instance(name="name_value"), ) mock_args.update(sample_request) @@ -3596,7 +4063,7 @@ def test_update_instance_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -3606,10 +4073,14 @@ def test_update_instance_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{instance.name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{instance.name=projects/*/locations/*/instances/*}" + % client.transport._host, + args[1], + ) -def test_update_instance_rest_flattened_error(transport: str = 'rest'): +def test_update_instance_rest_flattened_error(transport: str = "rest"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3620,8 +4091,8 @@ def test_update_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.update_instance( cloud_redis.UpdateInstanceRequest(), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - instance=cloud_redis.Instance(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + instance=cloud_redis.Instance(name="name_value"), ) @@ -3643,7 +4114,9 @@ def test_delete_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client._transport._wrapped_methods[client._transport.delete_instance] = mock_rpc request = {} @@ -3663,55 +4136,60 @@ def test_delete_instance_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_delete_instance_rest_required_fields(request_type=cloud_redis.DeleteInstanceRequest): +def test_delete_instance_rest_required_fields( + request_type=cloud_redis.DeleteInstanceRequest, +): transport_class = transports.CloudRedisRestTransport request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport="rest", ) request = 
request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values # for required fields will fail the real version if the http_options # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: + with mock.patch.object(path_template, "transcode") as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. pb_request = request_type.pb(request) transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, } transcode.return_value = transcode_result @@ -3719,23 +4197,24 @@ def test_delete_instance_rest_required_fields(request_type=cloud_redis.DeleteIns response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_instance(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] + expected_params = [] + actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params def test_delete_instance_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) 
unset_fields = transport.delete_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + assert set(unset_fields) == (set(()) & set(("name",))) def test_delete_instance_rest_flattened(): @@ -3745,16 +4224,18 @@ def test_delete_instance_rest_flattened(): ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3" + } # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -3762,7 +4243,7 @@ def test_delete_instance_rest_flattened(): response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -3772,10 +4253,13 @@ def test_delete_instance_rest_flattened(): # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, + args[1], + ) -def test_delete_instance_rest_flattened_error(transport: str = 'rest'): +def test_delete_instance_rest_flattened_error(transport: str = "rest"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3786,7 +4270,7 @@ def test_delete_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.delete_instance( cloud_redis.DeleteInstanceRequest(), - name='name_value', + name="name_value", ) @@ -3828,8 +4312,7 @@ def test_credentials_transport_error(): options.api_key = "api_key" with pytest.raises(ValueError): client = CloudRedisClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() + client_options=options, credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. @@ -3851,6 +4334,7 @@ def test_transport_instance(): client = CloudRedisClient(transport=transport) assert client.transport is transport + def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.CloudRedisGrpcTransport( @@ -3865,18 +4349,23 @@ def test_transport_get_channel(): channel = transport.grpc_channel assert channel -@pytest.mark.parametrize("transport_class", [ - transports.CloudRedisGrpcTransport, - transports.CloudRedisGrpcAsyncIOTransport, - transports.CloudRedisRestTransport, -]) + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudRedisGrpcTransport, + transports.CloudRedisGrpcAsyncIOTransport, + transports.CloudRedisRestTransport, + ], +) def test_transport_adc(transport_class): # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() + def test_transport_kind_grpc(): transport = CloudRedisClient.get_transport_class("grpc")( credentials=ga_credentials.AnonymousCredentials() @@ -3886,8 +4375,7 @@ def test_transport_kind_grpc(): def test_initialize_client_w_grpc(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" ) assert client is not None @@ -3901,9 +4389,7 @@ def test_list_instances_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: call.return_value = cloud_redis.ListInstancesResponse() client.list_instances(request=None) @@ -3924,9 +4410,7 @@ def test_get_instance_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: call.return_value = cloud_redis.Instance() client.get_instance(request=None) @@ -3947,10 +4431,8 @@ def test_create_instance_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.create_instance(request=None) # Establish that the underlying stub method was called. 
@@ -3970,10 +4452,8 @@ def test_update_instance_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.update_instance(request=None) # Establish that the underlying stub method was called. @@ -3993,10 +4473,8 @@ def test_delete_instance_empty_call_grpc(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") client.delete_instance(request=None) # Establish that the underlying stub method was called. @@ -4016,8 +4494,7 @@ def test_transport_kind_grpc_asyncio(): def test_initialize_client_w_grpc_asyncio(): client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" + credentials=async_anonymous_credentials(), transport="grpc_asyncio" ) assert client is not None @@ -4032,14 +4509,14 @@ async def test_list_instances_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_redis.ListInstancesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) await client.list_instances(request=None) # Establish that the underlying stub method was called. @@ -4060,39 +4537,41 @@ async def test_get_instance_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.Instance( - name='name_value', - display_name='display_name_value', - location_id='location_id_value', - alternative_location_id='alternative_location_id_value', - redis_version='redis_version_value', - reserved_ip_range='reserved_ip_range_value', - secondary_ip_range='secondary_ip_range_value', - host='host_value', - port=453, - current_location_id='current_location_id_value', - state=cloud_redis.Instance.State.CREATING, - status_message='status_message_value', - tier=cloud_redis.Instance.Tier.BASIC, - memory_size_gb=1499, - authorized_network='authorized_network_value', - persistence_iam_identity='persistence_iam_identity_value', - connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, - auth_enabled=True, - transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, - replica_count=1384, - read_endpoint='read_endpoint_value', - read_endpoint_port=1920, - read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, - customer_managed_key='customer_managed_key_value', - 
suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], - maintenance_version='maintenance_version_value', - available_maintenance_versions=['available_maintenance_versions_value'], - )) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloud_redis.Instance( + name="name_value", + display_name="display_name_value", + location_id="location_id_value", + alternative_location_id="alternative_location_id_value", + redis_version="redis_version_value", + reserved_ip_range="reserved_ip_range_value", + secondary_ip_range="secondary_ip_range_value", + host="host_value", + port=453, + current_location_id="current_location_id_value", + state=cloud_redis.Instance.State.CREATING, + status_message="status_message_value", + tier=cloud_redis.Instance.Tier.BASIC, + memory_size_gb=1499, + authorized_network="authorized_network_value", + persistence_iam_identity="persistence_iam_identity_value", + connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, + auth_enabled=True, + transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, + replica_count=1384, + read_endpoint="read_endpoint_value", + read_endpoint_port=1920, + read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, + customer_managed_key="customer_managed_key_value", + suspension_reasons=[ + cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE + ], + maintenance_version="maintenance_version_value", + available_maintenance_versions=["available_maintenance_versions_value"], + ) + ) await client.get_instance(request=None) # Establish that the underlying stub method was called. @@ -4113,12 +4592,10 @@ async def test_create_instance_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.create_instance(request=None) @@ -4140,12 +4617,10 @@ async def test_update_instance_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.update_instance(request=None) @@ -4167,12 +4642,10 @@ async def test_delete_instance_empty_call_grpc_asyncio(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') + operations_pb2.Operation(name="operations/spam") ) await client.delete_instance(request=None) @@ -4193,18 +4666,20 @@ def test_transport_kind_rest(): def test_list_instances_rest_bad_request(request_type=cloud_redis.ListInstancesRequest): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -4213,26 +4688,28 @@ def test_list_instances_rest_bad_request(request_type=cloud_redis.ListInstancesR client.list_instances(request) -@pytest.mark.parametrize("request_type", [ - cloud_redis.ListInstancesRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.ListInstancesRequest, + dict, + ], +) def test_list_instances_rest_call_success(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} 
request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = cloud_redis.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -4242,34 +4719,46 @@ def test_list_instances_rest_call_success(request_type): # Convert return value to protobuf type return_value = cloud_redis.ListInstancesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_instances(request) # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_instances_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.CloudRedisRestInterceptor(), + ) client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_list_instances") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_list_instances_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_list_instances") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.CloudRedisRestInterceptor, "post_list_instances" + ) as post, + mock.patch.object( + transports.CloudRedisRestInterceptor, "post_list_instances_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.CloudRedisRestInterceptor, "pre_list_instances" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = cloud_redis.ListInstancesRequest.pb(cloud_redis.ListInstancesRequest()) + pb_message = cloud_redis.ListInstancesRequest.pb( + cloud_redis.ListInstancesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -4280,11 +4769,13 @@ def 
test_list_instances_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = cloud_redis.ListInstancesResponse.to_json(cloud_redis.ListInstancesResponse()) + return_value = cloud_redis.ListInstancesResponse.to_json( + cloud_redis.ListInstancesResponse() + ) req.return_value.content = return_value request = cloud_redis.ListInstancesRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -4292,7 +4783,13 @@ def test_list_instances_rest_interceptors(null_interceptor): post.return_value = cloud_redis.ListInstancesResponse() post_with_metadata.return_value = cloud_redis.ListInstancesResponse(), metadata - client.list_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_instances( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -4301,18 +4798,20 @@ def test_list_instances_rest_interceptors(null_interceptor): def test_get_instance_rest_bad_request(request_type=cloud_redis.GetInstanceRequest): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -4321,51 +4820,55 @@ def test_get_instance_rest_bad_request(request_type=cloud_redis.GetInstanceReque client.get_instance(request) -@pytest.mark.parametrize("request_type", [ - cloud_redis.GetInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.GetInstanceRequest, + dict, + ], +) def test_get_instance_rest_call_success(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = cloud_redis.Instance( - name='name_value', - display_name='display_name_value', - location_id='location_id_value', - alternative_location_id='alternative_location_id_value', - redis_version='redis_version_value', - reserved_ip_range='reserved_ip_range_value', - secondary_ip_range='secondary_ip_range_value', - host='host_value', - port=453, - current_location_id='current_location_id_value', - state=cloud_redis.Instance.State.CREATING, - status_message='status_message_value', - tier=cloud_redis.Instance.Tier.BASIC, - memory_size_gb=1499, - authorized_network='authorized_network_value', - persistence_iam_identity='persistence_iam_identity_value', - connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, - auth_enabled=True, - transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, - replica_count=1384, - read_endpoint='read_endpoint_value', - read_endpoint_port=1920, - read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, - customer_managed_key='customer_managed_key_value', - suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], - maintenance_version='maintenance_version_value', - available_maintenance_versions=['available_maintenance_versions_value'], + name="name_value", + display_name="display_name_value", + location_id="location_id_value", + alternative_location_id="alternative_location_id_value", + redis_version="redis_version_value", + reserved_ip_range="reserved_ip_range_value", + secondary_ip_range="secondary_ip_range_value", + host="host_value", + port=453, + current_location_id="current_location_id_value", + state=cloud_redis.Instance.State.CREATING, + status_message="status_message_value", + tier=cloud_redis.Instance.Tier.BASIC, + memory_size_gb=1499, + authorized_network="authorized_network_value", + persistence_iam_identity="persistence_iam_identity_value", + connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, + auth_enabled=True, + 
transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, + replica_count=1384, + read_endpoint="read_endpoint_value", + read_endpoint_port=1920, + read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, + customer_managed_key="customer_managed_key_value", + suspension_reasons=[ + cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE + ], + maintenance_version="maintenance_version_value", + available_maintenance_versions=["available_maintenance_versions_value"], ) # Wrap the value into a proper Response obj @@ -4375,55 +4878,75 @@ def test_get_instance_rest_call_success(request_type): # Convert return value to protobuf type return_value = cloud_redis.Instance.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_instance(request) # Establish that the response is the type that we expect. 
assert isinstance(response, cloud_redis.Instance) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.location_id == 'location_id_value' - assert response.alternative_location_id == 'alternative_location_id_value' - assert response.redis_version == 'redis_version_value' - assert response.reserved_ip_range == 'reserved_ip_range_value' - assert response.secondary_ip_range == 'secondary_ip_range_value' - assert response.host == 'host_value' + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.location_id == "location_id_value" + assert response.alternative_location_id == "alternative_location_id_value" + assert response.redis_version == "redis_version_value" + assert response.reserved_ip_range == "reserved_ip_range_value" + assert response.secondary_ip_range == "secondary_ip_range_value" + assert response.host == "host_value" assert response.port == 453 - assert response.current_location_id == 'current_location_id_value' + assert response.current_location_id == "current_location_id_value" assert response.state == cloud_redis.Instance.State.CREATING - assert response.status_message == 'status_message_value' + assert response.status_message == "status_message_value" assert response.tier == cloud_redis.Instance.Tier.BASIC assert response.memory_size_gb == 1499 - assert response.authorized_network == 'authorized_network_value' - assert response.persistence_iam_identity == 'persistence_iam_identity_value' + assert response.authorized_network == "authorized_network_value" + assert response.persistence_iam_identity == "persistence_iam_identity_value" assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING assert response.auth_enabled is True - assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + assert ( + response.transit_encryption_mode + == 
cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + ) assert response.replica_count == 1384 - assert response.read_endpoint == 'read_endpoint_value' + assert response.read_endpoint == "read_endpoint_value" assert response.read_endpoint_port == 1920 - assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED - assert response.customer_managed_key == 'customer_managed_key_value' - assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE] - assert response.maintenance_version == 'maintenance_version_value' - assert response.available_maintenance_versions == ['available_maintenance_versions_value'] + assert ( + response.read_replicas_mode + == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED + ) + assert response.customer_managed_key == "customer_managed_key_value" + assert response.suspension_reasons == [ + cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE + ] + assert response.maintenance_version == "maintenance_version_value" + assert response.available_maintenance_versions == [ + "available_maintenance_versions_value" + ] @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_instance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.CloudRedisRestInterceptor(), + ) client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_get_instance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_get_instance_with_metadata") as post_with_metadata, \ - 
mock.patch.object(transports.CloudRedisRestInterceptor, "pre_get_instance") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.CloudRedisRestInterceptor, "post_get_instance" + ) as post, + mock.patch.object( + transports.CloudRedisRestInterceptor, "post_get_instance_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.CloudRedisRestInterceptor, "pre_get_instance" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -4442,7 +4965,7 @@ def test_get_instance_rest_interceptors(null_interceptor): req.return_value.content = return_value request = cloud_redis.GetInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -4450,27 +4973,37 @@ def test_get_instance_rest_interceptors(null_interceptor): post.return_value = cloud_redis.Instance() post_with_metadata.return_value = cloud_redis.Instance(), metadata - client.get_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_create_instance_rest_bad_request(request_type=cloud_redis.CreateInstanceRequest): +def test_create_instance_rest_bad_request( + request_type=cloud_redis.CreateInstanceRequest, +): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -4479,19 +5012,94 @@ def test_create_instance_rest_bad_request(request_type=cloud_redis.CreateInstanc client.create_instance(request) -@pytest.mark.parametrize("request_type", [ - cloud_redis.CreateInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.CreateInstanceRequest, + dict, + ], +) def test_create_instance_rest_call_success(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["instance"] = {'name': 'name_value', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 
'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["instance"] = { + "name": "name_value", + "display_name": "display_name_value", + "labels": {}, + "location_id": "location_id_value", + "alternative_location_id": "alternative_location_id_value", + "redis_version": "redis_version_value", + "reserved_ip_range": "reserved_ip_range_value", + "secondary_ip_range": "secondary_ip_range_value", + "host": "host_value", + "port": 453, + "current_location_id": "current_location_id_value", + "create_time": {"seconds": 751, "nanos": 543}, + "state": 1, + "status_message": "status_message_value", + "redis_configs": {}, + "tier": 1, + "memory_size_gb": 1499, + "authorized_network": "authorized_network_value", + "persistence_iam_identity": "persistence_iam_identity_value", + "connect_mode": 1, + "auth_enabled": True, + "server_ca_certs": [ + { + "serial_number": "serial_number_value", + "cert": "cert_value", + "create_time": {}, + "expire_time": {}, + "sha1_fingerprint": 
"sha1_fingerprint_value", + } + ], + "transit_encryption_mode": 1, + "maintenance_policy": { + "create_time": {}, + "update_time": {}, + "description": "description_value", + "weekly_maintenance_window": [ + { + "day": 1, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "duration": {"seconds": 751, "nanos": 543}, + } + ], + }, + "maintenance_schedule": { + "start_time": {}, + "end_time": {}, + "can_reschedule": True, + "schedule_deadline_time": {}, + }, + "replica_count": 1384, + "nodes": [{"id": "id_value", "zone": "zone_value"}], + "read_endpoint": "read_endpoint_value", + "read_endpoint_port": 1920, + "read_replicas_mode": 1, + "customer_managed_key": "customer_managed_key_value", + "persistence_config": { + "persistence_mode": 1, + "rdb_snapshot_period": 3, + "rdb_next_snapshot_time": {}, + "rdb_snapshot_start_time": {}, + }, + "suspension_reasons": [1], + "maintenance_version": "maintenance_version_value", + "available_maintenance_versions": [ + "available_maintenance_versions_value1", + "available_maintenance_versions_value2", + ], + } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 @@ -4511,7 +5119,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -4525,7 +5133,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER + for field, value in request_init["instance"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -4540,12 +5148,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -4558,15 +5170,15 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_instance(request) @@ -4579,20 +5191,32 @@ def get_message_fields(field): def test_create_instance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.CloudRedisRestInterceptor(), + ) client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_create_instance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_create_instance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_create_instance") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + 
transports.CloudRedisRestInterceptor, "post_create_instance" + ) as post, + mock.patch.object( + transports.CloudRedisRestInterceptor, "post_create_instance_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.CloudRedisRestInterceptor, "pre_create_instance" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = cloud_redis.CreateInstanceRequest.pb(cloud_redis.CreateInstanceRequest()) + pb_message = cloud_redis.CreateInstanceRequest.pb( + cloud_redis.CreateInstanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -4607,7 +5231,7 @@ def test_create_instance_rest_interceptors(null_interceptor): req.return_value.content = return_value request = cloud_redis.CreateInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -4615,27 +5239,39 @@ def test_create_instance_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.create_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.create_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_update_instance_rest_bad_request(request_type=cloud_redis.UpdateInstanceRequest): +def test_update_instance_rest_bad_request( + request_type=cloud_redis.UpdateInstanceRequest, +): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + request_init = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } request = 
request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -4644,19 +5280,96 @@ def test_update_instance_rest_bad_request(request_type=cloud_redis.UpdateInstanc client.update_instance(request) -@pytest.mark.parametrize("request_type", [ - cloud_redis.UpdateInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.UpdateInstanceRequest, + dict, + ], +) def test_update_instance_rest_call_success(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} - request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 
'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} + request_init = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } + request_init["instance"] = { + "name": "projects/sample1/locations/sample2/instances/sample3", + "display_name": "display_name_value", + "labels": {}, + "location_id": "location_id_value", + "alternative_location_id": "alternative_location_id_value", + "redis_version": "redis_version_value", + "reserved_ip_range": "reserved_ip_range_value", + "secondary_ip_range": "secondary_ip_range_value", + "host": "host_value", + "port": 453, + "current_location_id": "current_location_id_value", + "create_time": {"seconds": 751, "nanos": 543}, + "state": 1, + "status_message": "status_message_value", + "redis_configs": {}, + "tier": 1, + "memory_size_gb": 1499, + "authorized_network": "authorized_network_value", + 
"persistence_iam_identity": "persistence_iam_identity_value", + "connect_mode": 1, + "auth_enabled": True, + "server_ca_certs": [ + { + "serial_number": "serial_number_value", + "cert": "cert_value", + "create_time": {}, + "expire_time": {}, + "sha1_fingerprint": "sha1_fingerprint_value", + } + ], + "transit_encryption_mode": 1, + "maintenance_policy": { + "create_time": {}, + "update_time": {}, + "description": "description_value", + "weekly_maintenance_window": [ + { + "day": 1, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "duration": {"seconds": 751, "nanos": 543}, + } + ], + }, + "maintenance_schedule": { + "start_time": {}, + "end_time": {}, + "can_reschedule": True, + "schedule_deadline_time": {}, + }, + "replica_count": 1384, + "nodes": [{"id": "id_value", "zone": "zone_value"}], + "read_endpoint": "read_endpoint_value", + "read_endpoint_port": 1920, + "read_replicas_mode": 1, + "customer_managed_key": "customer_managed_key_value", + "persistence_config": { + "persistence_mode": 1, + "rdb_snapshot_period": 3, + "rdb_next_snapshot_time": {}, + "rdb_snapshot_start_time": {}, + }, + "suspension_reasons": [1], + "maintenance_version": "maintenance_version_value", + "available_maintenance_versions": [ + "available_maintenance_versions_value1", + "available_maintenance_versions_value2", + ], + } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 @@ -4676,7 +5389,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -4690,7 +5403,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER + for field, value in request_init["instance"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -4705,12 +5418,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -4723,15 +5440,15 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_instance(request) @@ -4744,20 +5461,32 @@ def get_message_fields(field): def test_update_instance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.CloudRedisRestInterceptor(), + ) client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_update_instance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_update_instance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_update_instance") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + 
transports.CloudRedisRestInterceptor, "post_update_instance" + ) as post, + mock.patch.object( + transports.CloudRedisRestInterceptor, "post_update_instance_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.CloudRedisRestInterceptor, "pre_update_instance" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = cloud_redis.UpdateInstanceRequest.pb(cloud_redis.UpdateInstanceRequest()) + pb_message = cloud_redis.UpdateInstanceRequest.pb( + cloud_redis.UpdateInstanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -4772,7 +5501,7 @@ def test_update_instance_rest_interceptors(null_interceptor): req.return_value.content = return_value request = cloud_redis.UpdateInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -4780,27 +5509,37 @@ def test_update_instance_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.update_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.update_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() -def test_delete_instance_rest_bad_request(request_type=cloud_redis.DeleteInstanceRequest): +def test_delete_instance_rest_bad_request( + request_type=cloud_redis.DeleteInstanceRequest, +): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request 
call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = mock.Mock() @@ -4809,30 +5548,32 @@ def test_delete_instance_rest_bad_request(request_type=cloud_redis.DeleteInstanc client.delete_instance(request) -@pytest.mark.parametrize("request_type", [ - cloud_redis.DeleteInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.DeleteInstanceRequest, + dict, + ], +) def test_delete_instance_rest_call_success(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_instance(request) @@ -4845,20 +5586,32 @@ def test_delete_instance_rest_call_success(request_type): def test_delete_instance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.CloudRedisRestInterceptor(), + ) client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_delete_instance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_delete_instance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_delete_instance") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.CloudRedisRestInterceptor, "post_delete_instance" + ) as post, + mock.patch.object( + transports.CloudRedisRestInterceptor, "post_delete_instance_with_metadata" + ) as post_with_metadata, + 
mock.patch.object( + transports.CloudRedisRestInterceptor, "pre_delete_instance" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = cloud_redis.DeleteInstanceRequest.pb(cloud_redis.DeleteInstanceRequest()) + pb_message = cloud_redis.DeleteInstanceRequest.pb( + cloud_redis.DeleteInstanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -4873,7 +5626,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): req.return_value.content = return_value request = cloud_redis.DeleteInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -4881,7 +5634,13 @@ def test_delete_instance_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.delete_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.delete_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() @@ -4894,13 +5653,18 @@ def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationReq transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -4909,20 +5673,23 @@ def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationReq client.get_location(request) -@pytest.mark.parametrize("request_type", [ - locations_pb2.GetLocationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) def test_get_location_rest(request_type): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2'} + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = locations_pb2.Location() @@ -4930,7 +5697,7 @@ def test_get_location_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -4941,19 +5708,24 @@ def test_get_location_rest(request_type): assert isinstance(response, locations_pb2.Location) -def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): +def test_list_locations_rest_bad_request( + request_type=locations_pb2.ListLocationsRequest, +): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) + request = json_format.ParseDict({"name": "projects/sample1"}, request) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -4962,20 +5734,23 @@ def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocation client.list_locations(request) -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) def test_list_locations_rest(request_type): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1'} + request_init = {"name": "projects/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = locations_pb2.ListLocationsResponse() @@ -4983,7 +5758,7 @@ def test_list_locations_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -4994,19 +5769,26 @@ def test_list_locations_rest(request_type): assert isinstance(response, locations_pb2.ListLocationsResponse) -def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): +def test_cancel_operation_rest_bad_request( + request_type=operations_pb2.CancelOperationRequest, +): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -5015,28 +5797,31 @@ def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOpe client.cancel_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) def test_cancel_operation_rest(request_type): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -5047,19 +5832,26 @@ def test_cancel_operation_rest(request_type): assert response is None -def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): +def test_delete_operation_rest_bad_request( + request_type=operations_pb2.DeleteOperationRequest, +): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -5068,28 +5860,31 @@ def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOpe client.delete_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) def test_delete_operation_rest(request_type): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -5100,19 +5895,26 @@ def test_delete_operation_rest(request_type): assert response is None -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -5121,20 +5923,23 @@ def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperation client.get_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) def test_get_operation_rest(request_type): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = operations_pb2.Operation() @@ -5142,7 +5947,7 @@ def test_get_operation_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -5153,19 +5958,26 @@ def test_get_operation_rest(request_type): assert isinstance(response, operations_pb2.Operation) -def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -5174,20 +5986,23 @@ def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperat client.list_operations(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) def test_list_operations_rest(request_type): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2'} + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = operations_pb2.ListOperationsResponse() @@ -5195,7 +6010,7 @@ def test_list_operations_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -5206,19 +6021,26 @@ def test_list_operations_rest(request_type): assert isinstance(response, operations_pb2.ListOperationsResponse) -def test_wait_operation_rest_bad_request(request_type=operations_pb2.WaitOperationRequest): +def test_wait_operation_rest_bad_request( + request_type=operations_pb2.WaitOperationRequest, +): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = Response() - json_return_value = '' + json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 response_value.request = Request() @@ -5227,20 +6049,23 @@ def test_wait_operation_rest_bad_request(request_type=operations_pb2.WaitOperati client.wait_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.WaitOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.WaitOperationRequest, + dict, + ], +) def test_wait_operation_rest(request_type): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = operations_pb2.Operation() @@ -5248,7 +6073,7 @@ def test_wait_operation_rest(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -5258,10 +6083,10 @@ def test_wait_operation_rest(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, operations_pb2.Operation) + def test_initialize_client_w_rest(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) assert client is not None @@ -5275,9 +6100,7 @@ def test_list_instances_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: client.list_instances(request=None) # Establish that the underlying stub method was called. @@ -5297,9 +6120,7 @@ def test_get_instance_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: client.get_instance(request=None) # Establish that the underlying stub method was called. @@ -5319,9 +6140,7 @@ def test_create_instance_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: client.create_instance(request=None) # Establish that the underlying stub method was called. @@ -5341,9 +6160,7 @@ def test_update_instance_empty_call_rest(): ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: client.update_instance(request=None) # Establish that the underlying stub method was called. @@ -5363,9 +6180,7 @@ def test_delete_instance_empty_call_rest(): ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: client.delete_instance(request=None) # Establish that the underlying stub method was called. @@ -5386,15 +6201,18 @@ def test_cloud_redis_rest_lro_client(): # Ensure that we have an api-core operations client. assert isinstance( transport.operations_client, -operations_v1.AbstractOperationsClient, + operations_v1.AbstractOperationsClient, ) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client + def test_transport_kind_rest_asyncio(): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) transport = CloudRedisAsyncClient.get_transport_class("rest_asyncio")( credentials=async_anonymous_credentials() ) @@ -5402,22 +6220,28 @@ def test_transport_kind_rest_asyncio(): @pytest.mark.asyncio -async def test_list_instances_rest_asyncio_bad_request(request_type=cloud_redis.ListInstancesRequest): +async def test_list_instances_rest_asyncio_bad_request( + request_type=cloud_redis.ListInstancesRequest, +): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." 
+ ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(AsyncAuthorizedSession, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.read = mock.AsyncMock(return_value=b"{}") response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value @@ -5426,28 +6250,32 @@ async def test_list_instances_rest_asyncio_bad_request(request_type=cloud_redis. @pytest.mark.asyncio -@pytest.mark.parametrize("request_type", [ - cloud_redis.ListInstancesRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.ListInstancesRequest, + dict, + ], +) async def test_list_instances_rest_asyncio_call_success(request_type): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." 
+ ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = cloud_redis.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -5457,37 +6285,54 @@ async def test_list_instances_rest_asyncio_call_success(request_type): # Convert return value to protobuf type return_value = cloud_redis.ListInstancesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.list_instances(request) # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListInstancesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio @pytest.mark.parametrize("null_interceptor", [True, False]) async def test_list_instances_rest_asyncio_interceptors(null_interceptor): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) transport = transports.AsyncCloudRedisRestTransport( credentials=async_anonymous_credentials(), - interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AsyncCloudRedisRestInterceptor(), + ) client = CloudRedisAsyncClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_list_instances") as post, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_list_instances_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_list_instances") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "post_list_instances" + ) as post, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, + "post_list_instances_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "pre_list_instances" + ) as pre, + ): pre.assert_not_called() 
post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = cloud_redis.ListInstancesRequest.pb(cloud_redis.ListInstancesRequest()) + pb_message = cloud_redis.ListInstancesRequest.pb( + cloud_redis.ListInstancesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -5498,11 +6343,13 @@ async def test_list_instances_rest_asyncio_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = cloud_redis.ListInstancesResponse.to_json(cloud_redis.ListInstancesResponse()) + return_value = cloud_redis.ListInstancesResponse.to_json( + cloud_redis.ListInstancesResponse() + ) req.return_value.read = mock.AsyncMock(return_value=return_value) request = cloud_redis.ListInstancesRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -5510,29 +6357,42 @@ async def test_list_instances_rest_asyncio_interceptors(null_interceptor): post.return_value = cloud_redis.ListInstancesResponse() post_with_metadata.return_value = cloud_redis.ListInstancesResponse(), metadata - await client.list_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + await client.list_instances( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() + @pytest.mark.asyncio -async def test_get_instance_rest_asyncio_bad_request(request_type=cloud_redis.GetInstanceRequest): +async def test_get_instance_rest_asyncio_bad_request( + request_type=cloud_redis.GetInstanceRequest, +): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." 
+ ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(AsyncAuthorizedSession, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.read = mock.AsyncMock(return_value=b"{}") response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value @@ -5541,53 +6401,59 @@ async def test_get_instance_rest_asyncio_bad_request(request_type=cloud_redis.Ge @pytest.mark.asyncio -@pytest.mark.parametrize("request_type", [ - cloud_redis.GetInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.GetInstanceRequest, + dict, + ], +) async def test_get_instance_rest_asyncio_call_success(request_type): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." 
+ ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = cloud_redis.Instance( - name='name_value', - display_name='display_name_value', - location_id='location_id_value', - alternative_location_id='alternative_location_id_value', - redis_version='redis_version_value', - reserved_ip_range='reserved_ip_range_value', - secondary_ip_range='secondary_ip_range_value', - host='host_value', - port=453, - current_location_id='current_location_id_value', - state=cloud_redis.Instance.State.CREATING, - status_message='status_message_value', - tier=cloud_redis.Instance.Tier.BASIC, - memory_size_gb=1499, - authorized_network='authorized_network_value', - persistence_iam_identity='persistence_iam_identity_value', - connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, - auth_enabled=True, - transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, - replica_count=1384, - read_endpoint='read_endpoint_value', - read_endpoint_port=1920, - read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, - customer_managed_key='customer_managed_key_value', - suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], - maintenance_version='maintenance_version_value', - available_maintenance_versions=['available_maintenance_versions_value'], + 
name="name_value", + display_name="display_name_value", + location_id="location_id_value", + alternative_location_id="alternative_location_id_value", + redis_version="redis_version_value", + reserved_ip_range="reserved_ip_range_value", + secondary_ip_range="secondary_ip_range_value", + host="host_value", + port=453, + current_location_id="current_location_id_value", + state=cloud_redis.Instance.State.CREATING, + status_message="status_message_value", + tier=cloud_redis.Instance.Tier.BASIC, + memory_size_gb=1499, + authorized_network="authorized_network_value", + persistence_iam_identity="persistence_iam_identity_value", + connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, + auth_enabled=True, + transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, + replica_count=1384, + read_endpoint="read_endpoint_value", + read_endpoint_port=1920, + read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, + customer_managed_key="customer_managed_key_value", + suspension_reasons=[ + cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE + ], + maintenance_version="maintenance_version_value", + available_maintenance_versions=["available_maintenance_versions_value"], ) # Wrap the value into a proper Response obj @@ -5597,58 +6463,82 @@ async def test_get_instance_rest_asyncio_call_success(request_type): # Convert return value to protobuf type return_value = cloud_redis.Instance.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.get_instance(request) # Establish that the response is the type that we expect. 
assert isinstance(response, cloud_redis.Instance) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.location_id == 'location_id_value' - assert response.alternative_location_id == 'alternative_location_id_value' - assert response.redis_version == 'redis_version_value' - assert response.reserved_ip_range == 'reserved_ip_range_value' - assert response.secondary_ip_range == 'secondary_ip_range_value' - assert response.host == 'host_value' + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.location_id == "location_id_value" + assert response.alternative_location_id == "alternative_location_id_value" + assert response.redis_version == "redis_version_value" + assert response.reserved_ip_range == "reserved_ip_range_value" + assert response.secondary_ip_range == "secondary_ip_range_value" + assert response.host == "host_value" assert response.port == 453 - assert response.current_location_id == 'current_location_id_value' + assert response.current_location_id == "current_location_id_value" assert response.state == cloud_redis.Instance.State.CREATING - assert response.status_message == 'status_message_value' + assert response.status_message == "status_message_value" assert response.tier == cloud_redis.Instance.Tier.BASIC assert response.memory_size_gb == 1499 - assert response.authorized_network == 'authorized_network_value' - assert response.persistence_iam_identity == 'persistence_iam_identity_value' + assert response.authorized_network == "authorized_network_value" + assert response.persistence_iam_identity == "persistence_iam_identity_value" assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING assert response.auth_enabled is True - assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + assert ( + response.transit_encryption_mode + == 
cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + ) assert response.replica_count == 1384 - assert response.read_endpoint == 'read_endpoint_value' + assert response.read_endpoint == "read_endpoint_value" assert response.read_endpoint_port == 1920 - assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED - assert response.customer_managed_key == 'customer_managed_key_value' - assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE] - assert response.maintenance_version == 'maintenance_version_value' - assert response.available_maintenance_versions == ['available_maintenance_versions_value'] + assert ( + response.read_replicas_mode + == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED + ) + assert response.customer_managed_key == "customer_managed_key_value" + assert response.suspension_reasons == [ + cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE + ] + assert response.maintenance_version == "maintenance_version_value" + assert response.available_maintenance_versions == [ + "available_maintenance_versions_value" + ] @pytest.mark.asyncio @pytest.mark.parametrize("null_interceptor", [True, False]) async def test_get_instance_rest_asyncio_interceptors(null_interceptor): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." 
+ ) transport = transports.AsyncCloudRedisRestTransport( credentials=async_anonymous_credentials(), - interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AsyncCloudRedisRestInterceptor(), + ) client = CloudRedisAsyncClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_get_instance") as post, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_get_instance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_get_instance") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "post_get_instance" + ) as post, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "post_get_instance_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "pre_get_instance" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() @@ -5667,7 +6557,7 @@ async def test_get_instance_rest_asyncio_interceptors(null_interceptor): req.return_value.read = mock.AsyncMock(return_value=return_value) request = cloud_redis.GetInstanceRequest() - metadata =[ + metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -5675,29 +6565,42 @@ async def test_get_instance_rest_asyncio_interceptors(null_interceptor): post.return_value = cloud_redis.Instance() post_with_metadata.return_value = cloud_redis.Instance(), metadata - await client.get_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + await client.get_instance( + request, + metadata=[ + ("key", "val"), + 
("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() + @pytest.mark.asyncio -async def test_create_instance_rest_asyncio_bad_request(request_type=cloud_redis.CreateInstanceRequest): +async def test_create_instance_rest_asyncio_bad_request( + request_type=cloud_redis.CreateInstanceRequest, +): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(AsyncAuthorizedSession, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.read = mock.AsyncMock(return_value=b"{}") response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value @@ -5706,21 +6609,98 @@ async def test_create_instance_rest_asyncio_bad_request(request_type=cloud_redis @pytest.mark.asyncio -@pytest.mark.parametrize("request_type", [ - cloud_redis.CreateInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.CreateInstanceRequest, + dict, + ], +) async def test_create_instance_rest_asyncio_call_success(request_type): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." 
+ ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["instance"] = {'name': 'name_value', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 
'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["instance"] = { + "name": "name_value", + "display_name": "display_name_value", + "labels": {}, + "location_id": "location_id_value", + "alternative_location_id": "alternative_location_id_value", + "redis_version": "redis_version_value", + "reserved_ip_range": "reserved_ip_range_value", + "secondary_ip_range": "secondary_ip_range_value", + "host": "host_value", + "port": 453, + "current_location_id": "current_location_id_value", + "create_time": {"seconds": 751, "nanos": 543}, + "state": 1, + "status_message": "status_message_value", + "redis_configs": {}, + "tier": 1, + "memory_size_gb": 1499, + "authorized_network": "authorized_network_value", + "persistence_iam_identity": "persistence_iam_identity_value", + "connect_mode": 1, + "auth_enabled": True, + "server_ca_certs": [ + { + "serial_number": "serial_number_value", + "cert": "cert_value", + "create_time": {}, + "expire_time": {}, + "sha1_fingerprint": "sha1_fingerprint_value", + } + ], + "transit_encryption_mode": 1, + "maintenance_policy": { + "create_time": {}, + "update_time": {}, + "description": "description_value", + "weekly_maintenance_window": [ + { + "day": 1, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "duration": {"seconds": 751, "nanos": 543}, + } + ], + }, + "maintenance_schedule": { + "start_time": {}, + "end_time": {}, + "can_reschedule": True, + "schedule_deadline_time": {}, + }, + "replica_count": 1384, + "nodes": [{"id": "id_value", "zone": "zone_value"}], + "read_endpoint": "read_endpoint_value", + "read_endpoint_port": 1920, + "read_replicas_mode": 1, + "customer_managed_key": "customer_managed_key_value", + "persistence_config": { + "persistence_mode": 1, + "rdb_snapshot_period": 3, + 
"rdb_next_snapshot_time": {}, + "rdb_snapshot_start_time": {}, + }, + "suspension_reasons": [1], + "maintenance_version": "maintenance_version_value", + "available_maintenance_versions": [ + "available_maintenance_versions_value1", + "available_maintenance_versions_value2", + ], + } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 @@ -5740,7 +6720,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -5754,7 +6734,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER + for field, value in request_init["instance"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -5769,12 +6749,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # 
pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -5787,15 +6771,17 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.create_instance(request) @@ -5808,23 +6794,38 @@ def get_message_fields(field): @pytest.mark.parametrize("null_interceptor", [True, False]) async def test_create_instance_rest_asyncio_interceptors(null_interceptor): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." 
+ ) transport = transports.AsyncCloudRedisRestTransport( credentials=async_anonymous_credentials(), - interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AsyncCloudRedisRestInterceptor(), + ) client = CloudRedisAsyncClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_create_instance") as post, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_create_instance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_create_instance") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "post_create_instance" + ) as post, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, + "post_create_instance_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "pre_create_instance" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = cloud_redis.CreateInstanceRequest.pb(cloud_redis.CreateInstanceRequest()) + pb_message = cloud_redis.CreateInstanceRequest.pb( + cloud_redis.CreateInstanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -5839,7 +6840,7 @@ async def test_create_instance_rest_asyncio_interceptors(null_interceptor): req.return_value.read = mock.AsyncMock(return_value=return_value) request = cloud_redis.CreateInstanceRequest() - metadata =[ + 
metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -5847,29 +6848,44 @@ async def test_create_instance_rest_asyncio_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - await client.create_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + await client.create_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() + @pytest.mark.asyncio -async def test_update_instance_rest_asyncio_bad_request(request_type=cloud_redis.UpdateInstanceRequest): +async def test_update_instance_rest_asyncio_bad_request( + request_type=cloud_redis.UpdateInstanceRequest, +): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + request_init = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(AsyncAuthorizedSession, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.read = mock.AsyncMock(return_value=b"{}") response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value @@ -5878,21 +6894,100 @@ async def test_update_instance_rest_asyncio_bad_request(request_type=cloud_redis @pytest.mark.asyncio -@pytest.mark.parametrize("request_type", [ - cloud_redis.UpdateInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.UpdateInstanceRequest, + dict, + ], +) async def test_update_instance_rest_asyncio_call_success(request_type): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." 
+ ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} - request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 
'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} + request_init = { + "instance": {"name": "projects/sample1/locations/sample2/instances/sample3"} + } + request_init["instance"] = { + "name": "projects/sample1/locations/sample2/instances/sample3", + "display_name": "display_name_value", + "labels": {}, + "location_id": "location_id_value", + "alternative_location_id": "alternative_location_id_value", + "redis_version": "redis_version_value", + "reserved_ip_range": "reserved_ip_range_value", + "secondary_ip_range": "secondary_ip_range_value", + "host": "host_value", + "port": 453, + "current_location_id": "current_location_id_value", + "create_time": {"seconds": 751, "nanos": 543}, + "state": 1, + "status_message": "status_message_value", + "redis_configs": {}, + "tier": 1, + "memory_size_gb": 1499, + "authorized_network": "authorized_network_value", + "persistence_iam_identity": "persistence_iam_identity_value", + "connect_mode": 1, + "auth_enabled": True, + "server_ca_certs": [ + { + "serial_number": "serial_number_value", + "cert": "cert_value", + "create_time": {}, + "expire_time": {}, + "sha1_fingerprint": "sha1_fingerprint_value", + } + ], + "transit_encryption_mode": 1, + "maintenance_policy": { + "create_time": {}, + "update_time": {}, + "description": "description_value", + "weekly_maintenance_window": [ + { + "day": 1, + "start_time": { + "hours": 561, + "minutes": 773, + "seconds": 751, + "nanos": 543, + }, + "duration": {"seconds": 751, "nanos": 543}, + } + ], + }, + "maintenance_schedule": { + "start_time": {}, + "end_time": {}, + "can_reschedule": True, + "schedule_deadline_time": {}, + }, + "replica_count": 1384, + "nodes": [{"id": "id_value", "zone": "zone_value"}], + "read_endpoint": "read_endpoint_value", + "read_endpoint_port": 1920, + "read_replicas_mode": 1, + 
"customer_managed_key": "customer_managed_key_value", + "persistence_config": { + "persistence_mode": 1, + "rdb_snapshot_period": 3, + "rdb_next_snapshot_time": {}, + "rdb_snapshot_start_time": {}, + }, + "suspension_reasons": [1], + "maintenance_version": "maintenance_version_value", + "available_maintenance_versions": [ + "available_maintenance_versions_value1", + "available_maintenance_versions_value2", + ], + } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 @@ -5912,7 +7007,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -5926,7 +7021,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER + for field, value in request_init["instance"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -5941,12 +7036,16 @@ def get_message_fields(field): for subfield in result.keys(): if (field, subfield) not in runtime_nested_fields: subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } ) # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at 
runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -5959,15 +7058,17 @@ def get_message_fields(field): request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.update_instance(request) @@ -5980,23 +7081,38 @@ def get_message_fields(field): @pytest.mark.parametrize("null_interceptor", [True, False]) async def test_update_instance_rest_asyncio_interceptors(null_interceptor): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." 
+ ) transport = transports.AsyncCloudRedisRestTransport( credentials=async_anonymous_credentials(), - interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AsyncCloudRedisRestInterceptor(), + ) client = CloudRedisAsyncClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_update_instance") as post, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_update_instance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_update_instance") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "post_update_instance" + ) as post, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, + "post_update_instance_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "pre_update_instance" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = cloud_redis.UpdateInstanceRequest.pb(cloud_redis.UpdateInstanceRequest()) + pb_message = cloud_redis.UpdateInstanceRequest.pb( + cloud_redis.UpdateInstanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -6011,7 +7127,7 @@ async def test_update_instance_rest_asyncio_interceptors(null_interceptor): req.return_value.read = mock.AsyncMock(return_value=return_value) request = cloud_redis.UpdateInstanceRequest() - metadata =[ + 
metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -6019,29 +7135,42 @@ async def test_update_instance_rest_asyncio_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - await client.update_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + await client.update_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() + @pytest.mark.asyncio -async def test_delete_instance_rest_asyncio_bad_request(request_type=cloud_redis.DeleteInstanceRequest): +async def test_delete_instance_rest_asyncio_bad_request( + request_type=cloud_redis.DeleteInstanceRequest, +): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(AsyncAuthorizedSession, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.read = mock.AsyncMock(return_value=b"{}") response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value @@ -6050,32 +7179,38 @@ async def test_delete_instance_rest_asyncio_bad_request(request_type=cloud_redis @pytest.mark.asyncio -@pytest.mark.parametrize("request_type", [ - cloud_redis.DeleteInstanceRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + cloud_redis.DeleteInstanceRequest, + dict, + ], +) async def test_delete_instance_rest_asyncio_call_success(request_type): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: + with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.delete_instance(request) @@ -6088,23 +7223,38 @@ async def test_delete_instance_rest_asyncio_call_success(request_type): @pytest.mark.parametrize("null_interceptor", [True, False]) async def test_delete_instance_rest_asyncio_interceptors(null_interceptor): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." 
+ ) transport = transports.AsyncCloudRedisRestTransport( credentials=async_anonymous_credentials(), - interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), - ) + interceptor=None + if null_interceptor + else transports.AsyncCloudRedisRestInterceptor(), + ) client = CloudRedisAsyncClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_delete_instance") as post, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_delete_instance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_delete_instance") as pre: + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "post_delete_instance" + ) as post, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, + "post_delete_instance_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AsyncCloudRedisRestInterceptor, "pre_delete_instance" + ) as pre, + ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = cloud_redis.DeleteInstanceRequest.pb(cloud_redis.DeleteInstanceRequest()) + pb_message = cloud_redis.DeleteInstanceRequest.pb( + cloud_redis.DeleteInstanceRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -6119,7 +7269,7 @@ async def test_delete_instance_rest_asyncio_interceptors(null_interceptor): req.return_value.read = mock.AsyncMock(return_value=return_value) request = cloud_redis.DeleteInstanceRequest() - metadata =[ + 
metadata = [ ("key", "val"), ("cephalopod", "squid"), ] @@ -6127,51 +7277,73 @@ async def test_delete_instance_rest_asyncio_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - await client.delete_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + await client.delete_instance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) pre.assert_called_once() post.assert_called_once() post_with_metadata.assert_called_once() + @pytest.mark.asyncio -async def test_get_location_rest_asyncio_bad_request(request_type=locations_pb2.GetLocationRequest): +async def test_get_location_rest_asyncio_bad_request( + request_type=locations_pb2.GetLocationRequest, +): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(AsyncAuthorizedSession, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.read = mock.AsyncMock(return_value=b"{}") response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.get_location(request) + @pytest.mark.asyncio -@pytest.mark.parametrize("request_type", [ - locations_pb2.GetLocationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) async def test_get_location_rest_asyncio(request_type): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) - request_init = {'name': 'projects/sample1/locations/sample2'} + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + with mock.patch.object(AsyncAuthorizedSession, "request") as req: # Designate an appropriate value for the returned response. 
return_value = locations_pb2.Location() @@ -6179,7 +7351,9 @@ async def test_get_location_rest_asyncio(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -6189,45 +7363,59 @@ async def test_get_location_rest_asyncio(request_type): # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.Location) + @pytest.mark.asyncio -async def test_list_locations_rest_asyncio_bad_request(request_type=locations_pb2.ListLocationsRequest): +async def test_list_locations_rest_asyncio_bad_request( + request_type=locations_pb2.ListLocationsRequest, +): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) + request = json_format.ParseDict({"name": "projects/sample1"}, request) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(AsyncAuthorizedSession, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.read = mock.AsyncMock(return_value=b"{}") response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.list_locations(request) + @pytest.mark.asyncio -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) async def test_list_locations_rest_asyncio(request_type): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) - request_init = {'name': 'projects/sample1'} + request_init = {"name": "projects/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + with mock.patch.object(AsyncAuthorizedSession, "request") as req: # Designate an appropriate value for the returned response. 
return_value = locations_pb2.ListLocationsResponse() @@ -6235,7 +7423,9 @@ async def test_list_locations_rest_asyncio(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -6245,53 +7435,71 @@ async def test_list_locations_rest_asyncio(request_type): # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.ListLocationsResponse) + @pytest.mark.asyncio -async def test_cancel_operation_rest_asyncio_bad_request(request_type=operations_pb2.CancelOperationRequest): +async def test_cancel_operation_rest_asyncio_bad_request( + request_type=operations_pb2.CancelOperationRequest, +): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(AsyncAuthorizedSession, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.read = mock.AsyncMock(return_value=b"{}") response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.cancel_operation(request) + @pytest.mark.asyncio -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) async def test_cancel_operation_rest_asyncio(request_type): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + with mock.patch.object(AsyncAuthorizedSession, "request") as req: # Designate an appropriate value for the returned response. 
return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = '{}' - response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + json_return_value = "{}" + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -6301,53 +7509,71 @@ async def test_cancel_operation_rest_asyncio(request_type): # Establish that the response is the type that we expect. assert response is None + @pytest.mark.asyncio -async def test_delete_operation_rest_asyncio_bad_request(request_type=operations_pb2.DeleteOperationRequest): +async def test_delete_operation_rest_asyncio_bad_request( + request_type=operations_pb2.DeleteOperationRequest, +): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(AsyncAuthorizedSession, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.read = mock.AsyncMock(return_value=b"{}") response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.delete_operation(request) + @pytest.mark.asyncio -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) async def test_delete_operation_rest_asyncio(request_type): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + with mock.patch.object(AsyncAuthorizedSession, "request") as req: # Designate an appropriate value for the returned response. 
return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = '{}' - response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + json_return_value = "{}" + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -6357,45 +7583,61 @@ async def test_delete_operation_rest_asyncio(request_type): # Establish that the response is the type that we expect. assert response is None + @pytest.mark.asyncio -async def test_get_operation_rest_asyncio_bad_request(request_type=operations_pb2.GetOperationRequest): +async def test_get_operation_rest_asyncio_bad_request( + request_type=operations_pb2.GetOperationRequest, +): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(AsyncAuthorizedSession, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.read = mock.AsyncMock(return_value=b"{}") response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.get_operation(request) + @pytest.mark.asyncio -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) async def test_get_operation_rest_asyncio(request_type): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + with mock.patch.object(AsyncAuthorizedSession, "request") as req: # Designate an appropriate value for the returned response. 
return_value = operations_pb2.Operation() @@ -6403,7 +7645,9 @@ async def test_get_operation_rest_asyncio(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -6413,45 +7657,61 @@ async def test_get_operation_rest_asyncio(request_type): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.Operation) + @pytest.mark.asyncio -async def test_list_operations_rest_asyncio_bad_request(request_type=operations_pb2.ListOperationsRequest): +async def test_list_operations_rest_asyncio_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(AsyncAuthorizedSession, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.read = mock.AsyncMock(return_value=b"{}") response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.list_operations(request) + @pytest.mark.asyncio -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) async def test_list_operations_rest_asyncio(request_type): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) - request_init = {'name': 'projects/sample1/locations/sample2'} + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + with mock.patch.object(AsyncAuthorizedSession, "request") as req: # Designate an appropriate value for the returned response. 
return_value = operations_pb2.ListOperationsResponse() @@ -6459,7 +7719,9 @@ async def test_list_operations_rest_asyncio(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -6469,45 +7731,61 @@ async def test_list_operations_rest_asyncio(request_type): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.ListOperationsResponse) + @pytest.mark.asyncio -async def test_wait_operation_rest_asyncio_bad_request(request_type=operations_pb2.WaitOperationRequest): +async def test_wait_operation_rest_asyncio_bad_request( + request_type=operations_pb2.WaitOperationRequest, +): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + with ( + mock.patch.object(AsyncAuthorizedSession, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): # Wrap the value into a proper Response obj response_value = mock.Mock() - response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.read = mock.AsyncMock(return_value=b"{}") response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.wait_operation(request) + @pytest.mark.asyncio -@pytest.mark.parametrize("request_type", [ - operations_pb2.WaitOperationRequest, - dict, -]) +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.WaitOperationRequest, + dict, + ], +) async def test_wait_operation_rest_asyncio(request_type): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. - with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + with mock.patch.object(AsyncAuthorizedSession, "request") as req: # Designate an appropriate value for the returned response. 
return_value = operations_pb2.Operation() @@ -6515,7 +7793,9 @@ async def test_wait_operation_rest_asyncio(request_type): response_value = mock.Mock() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) - response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} @@ -6525,12 +7805,14 @@ async def test_wait_operation_rest_asyncio(request_type): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.Operation) + def test_initialize_client_w_rest_asyncio(): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) assert client is not None @@ -6540,16 +7822,16 @@ def test_initialize_client_w_rest_asyncio(): @pytest.mark.asyncio async def test_list_instances_empty_call_rest_asyncio(): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: + with mock.patch.object(type(client.transport.list_instances), "__call__") as call: await client.list_instances(request=None) # Establish that the underlying stub method was called. @@ -6565,16 +7847,16 @@ async def test_list_instances_empty_call_rest_asyncio(): @pytest.mark.asyncio async def test_get_instance_empty_call_rest_asyncio(): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.get_instance), "__call__") as call: await client.get_instance(request=None) # Establish that the underlying stub method was called. @@ -6590,16 +7872,16 @@ async def test_get_instance_empty_call_rest_asyncio(): @pytest.mark.asyncio async def test_create_instance_empty_call_rest_asyncio(): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.create_instance), "__call__") as call: await client.create_instance(request=None) # Establish that the underlying stub method was called. 
@@ -6615,16 +7897,16 @@ async def test_create_instance_empty_call_rest_asyncio(): @pytest.mark.asyncio async def test_update_instance_empty_call_rest_asyncio(): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.update_instance), "__call__") as call: await client.update_instance(request=None) # Establish that the underlying stub method was called. @@ -6640,16 +7922,16 @@ async def test_update_instance_empty_call_rest_asyncio(): @pytest.mark.asyncio async def test_delete_instance_empty_call_rest_asyncio(): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: + with mock.patch.object(type(client.transport.delete_instance), "__call__") as call: await client.delete_instance(request=None) # Establish that the underlying stub method was called. @@ -6662,7 +7944,9 @@ async def test_delete_instance_empty_call_rest_asyncio(): def test_cloud_redis_rest_asyncio_lro_client(): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." 
+ ) client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", @@ -6672,22 +7956,28 @@ def test_cloud_redis_rest_asyncio_lro_client(): # Ensure that we have an api-core operations client. assert isinstance( transport.operations_client, -operations_v1.AsyncOperationsRestClient, + operations_v1.AsyncOperationsRestClient, ) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client + def test_unsupported_parameter_rest_asyncio(): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) options = client_options.ClientOptions(quota_project_id="octopus") - with pytest.raises(core_exceptions.AsyncRestUnsupportedParameterError, match="google.api_core.client_options.ClientOptions.quota_project_id") as exc: # type: ignore + with pytest.raises( + core_exceptions.AsyncRestUnsupportedParameterError, + match="google.api_core.client_options.ClientOptions.quota_project_id", + ) as exc: # type: ignore client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", - client_options=options - ) + client_options=options, + ) def test_transport_grpc_default(): @@ -6700,18 +7990,21 @@ def test_transport_grpc_default(): transports.CloudRedisGrpcTransport, ) + def test_cloud_redis_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.CloudRedisTransport( credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" + credentials_file="credentials.json", ) def test_cloud_redis_base_transport(): # Instantiate the base transport. 
- with mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport.__init__') as Transport: + with mock.patch( + "google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport.__init__" + ) as Transport: Transport.return_value = None transport = transports.CloudRedisTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -6720,18 +8013,18 @@ def test_cloud_redis_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. methods = ( - 'list_instances', - 'get_instance', - 'create_instance', - 'update_instance', - 'delete_instance', - 'get_location', - 'list_locations', - 'get_operation', - 'wait_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', + "list_instances", + "get_instance", + "create_instance", + "update_instance", + "delete_instance", + "get_location", + "list_locations", + "get_operation", + "wait_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) for method in methods: with pytest.raises(NotImplementedError): @@ -6747,7 +8040,7 @@ def test_cloud_redis_base_transport(): # Catch all for all remaining methods and properties remainder = [ - 'kind', + "kind", ] for r in remainder: with pytest.raises(NotImplementedError): @@ -6756,25 +8049,36 @@ def test_cloud_redis_base_transport(): def test_cloud_redis_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages') as Transport: + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch( + "google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages" + ) as Transport, + ): Transport.return_value = None load_creds.return_value = 
(ga_credentials.AnonymousCredentials(), None) transport = transports.CloudRedisTransport( credentials_file="credentials.json", quota_project_id="octopus", ) - load_creds.assert_called_once_with("credentials.json", + load_creds.assert_called_once_with( + "credentials.json", scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) def test_cloud_redis_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages') as Transport: + with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch( + "google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages" + ) as Transport, + ): Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.CloudRedisTransport() @@ -6783,14 +8087,12 @@ def test_cloud_redis_base_transport_with_adc(): def test_cloud_redis_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) CloudRedisClient() adc.assert_called_once_with( scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id=None, ) @@ -6805,12 +8107,12 @@ def test_cloud_redis_auth_adc(): def test_cloud_redis_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) @@ -6824,48 +8126,46 @@ def test_cloud_redis_transport_auth_adc(transport_class): ], ) def test_cloud_redis_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: + with mock.patch.object(google.auth, "default", autospec=True) as adc: gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) adc.return_value = (gdch_mock, None) transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) + gdch_mock.with_gdch_audience.assert_called_once_with(e) @pytest.mark.parametrize( "transport_class,grpc_helpers", [ (transports.CloudRedisGrpcTransport, grpc_helpers), - (transports.CloudRedisGrpcAsyncIOTransport, grpc_helpers_async) + (transports.CloudRedisGrpcAsyncIOTransport, grpc_helpers_async), ], ) def test_cloud_redis_transport_create_channel(transport_class, grpc_helpers): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: + with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel, + ): creds = ga_credentials.AnonymousCredentials() adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) create_channel.assert_called_with( "redis.googleapis.com:443", credentials=creds, credentials_file=None, quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), scopes=["1", "2"], default_host="redis.googleapis.com", ssl_credentials=None, @@ -6876,10 +8176,11 @@ def test_cloud_redis_transport_create_channel(transport_class, grpc_helpers): ) -@pytest.mark.parametrize("transport_class", [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport]) -def test_cloud_redis_grpc_transport_client_cert_source_for_mtls( - transport_class -): +@pytest.mark.parametrize( + "transport_class", + [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport], +) +def test_cloud_redis_grpc_transport_client_cert_source_for_mtls(transport_class): cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. 
@@ -6888,7 +8189,7 @@ def test_cloud_redis_grpc_transport_client_cert_source_for_mtls( transport_class( host="squid.clam.whelk", credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds + ssl_channel_credentials=mock_ssl_channel_creds, ) mock_create_channel.assert_called_once_with( "squid.clam.whelk:443", @@ -6909,61 +8210,77 @@ def test_cloud_redis_grpc_transport_client_cert_source_for_mtls( with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: transport_class( credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback + client_cert_source_for_mtls=client_cert_source_callback, ) expected_cert, expected_key = client_cert_source_callback() mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key + certificate_chain=expected_cert, private_key=expected_key ) + def test_cloud_redis_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.CloudRedisRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.CloudRedisRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) def test_cloud_redis_host_no_port(transport_name): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='redis.googleapis.com'), - transport=transport_name, + 
client_options=client_options.ClientOptions( + api_endpoint="redis.googleapis.com" + ), + transport=transport_name, ) assert client.transport._host == ( - 'redis.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://redis.googleapis.com' + "redis.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://redis.googleapis.com" ) -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) def test_cloud_redis_host_with_port(transport_name): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='redis.googleapis.com:8000'), + client_options=client_options.ClientOptions( + api_endpoint="redis.googleapis.com:8000" + ), transport=transport_name, ) assert client.transport._host == ( - 'redis.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://redis.googleapis.com:8000' + "redis.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://redis.googleapis.com:8000" ) -@pytest.mark.parametrize("transport_name", [ - "rest", -]) + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) def test_cloud_redis_client_transport_session_collision(transport_name): creds1 = ga_credentials.AnonymousCredentials() creds2 = ga_credentials.AnonymousCredentials() @@ -6990,8 +8307,10 @@ def test_cloud_redis_client_transport_session_collision(transport_name): session1 = client1.transport.delete_instance._session session2 = client2.transport.delete_instance._session assert session1 != session2 + + def test_cloud_redis_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. 
transport = transports.CloudRedisGrpcTransport( @@ -7004,7 +8323,7 @@ def test_cloud_redis_grpc_transport_channel(): def test_cloud_redis_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.CloudRedisGrpcAsyncIOTransport( @@ -7019,12 +8338,17 @@ def test_cloud_redis_grpc_asyncio_transport_channel(): # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. @pytest.mark.filterwarnings("ignore::FutureWarning") -@pytest.mark.parametrize("transport_class", [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport]) -def test_cloud_redis_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: +@pytest.mark.parametrize( + "transport_class", + [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport], +) +def test_cloud_redis_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -7033,7 +8357,7 @@ def test_cloud_redis_transport_channel_mtls_with_client_cert_source( cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -7063,17 +8387,20 @@ 
def test_cloud_redis_transport_channel_mtls_with_client_cert_source( # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport]) -def test_cloud_redis_transport_channel_mtls_with_adc( - transport_class -): +@pytest.mark.parametrize( + "transport_class", + [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport], +) +def test_cloud_redis_transport_channel_mtls_with_adc(transport_class): mock_ssl_cred = mock.Mock() with mock.patch.multiple( "google.auth.transport.grpc.SslCredentials", __init__=mock.Mock(return_value=None), ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel mock_cred = mock.Mock() @@ -7104,7 +8431,7 @@ def test_cloud_redis_transport_channel_mtls_with_adc( def test_cloud_redis_grpc_lro_client(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport="grpc", ) transport = client.transport @@ -7121,7 +8448,7 @@ def test_cloud_redis_grpc_lro_client(): def test_cloud_redis_grpc_lro_async_client(): client = CloudRedisAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', + transport="grpc_asyncio", ) transport = client.transport @@ -7139,7 +8466,11 @@ def test_instance_path(): project = "squid" location = "clam" instance = "whelk" - expected = "projects/{project}/locations/{location}/instances/{instance}".format(project=project, location=location, instance=instance, ) + expected = "projects/{project}/locations/{location}/instances/{instance}".format( + project=project, 
+ location=location, + instance=instance, + ) actual = CloudRedisClient.instance_path(project, location, instance) assert expected == actual @@ -7156,9 +8487,12 @@ def test_parse_instance_path(): actual = CloudRedisClient.parse_instance_path(path) assert expected == actual + def test_common_billing_account_path(): billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) actual = CloudRedisClient.common_billing_account_path(billing_account) assert expected == actual @@ -7173,9 +8507,12 @@ def test_parse_common_billing_account_path(): actual = CloudRedisClient.parse_common_billing_account_path(path) assert expected == actual + def test_common_folder_path(): folder = "winkle" - expected = "folders/{folder}".format(folder=folder, ) + expected = "folders/{folder}".format( + folder=folder, + ) actual = CloudRedisClient.common_folder_path(folder) assert expected == actual @@ -7190,9 +8527,12 @@ def test_parse_common_folder_path(): actual = CloudRedisClient.parse_common_folder_path(path) assert expected == actual + def test_common_organization_path(): organization = "scallop" - expected = "organizations/{organization}".format(organization=organization, ) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = CloudRedisClient.common_organization_path(organization) assert expected == actual @@ -7207,9 +8547,12 @@ def test_parse_common_organization_path(): actual = CloudRedisClient.parse_common_organization_path(path) assert expected == actual + def test_common_project_path(): project = "squid" - expected = "projects/{project}".format(project=project, ) + expected = "projects/{project}".format( + project=project, + ) actual = CloudRedisClient.common_project_path(project) assert expected == actual @@ -7224,10 +8567,14 @@ def test_parse_common_project_path(): actual = 
CloudRedisClient.parse_common_project_path(path) assert expected == actual + def test_common_location_path(): project = "whelk" location = "octopus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) actual = CloudRedisClient.common_location_path(project, location) assert expected == actual @@ -7247,14 +8594,18 @@ def test_parse_common_location_path(): def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() - with mock.patch.object(transports.CloudRedisTransport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.CloudRedisTransport, "_prep_wrapped_messages" + ) as prep: client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) - with mock.patch.object(transports.CloudRedisTransport, '_prep_wrapped_messages') as prep: + with mock.patch.object( + transports.CloudRedisTransport, "_prep_wrapped_messages" + ) as prep: transport_class = CloudRedisClient.get_transport_class() transport = transport_class( credentials=ga_credentials.AnonymousCredentials(), @@ -7265,7 +8616,8 @@ def test_client_with_default_client_info(): def test_delete_operation(transport: str = "grpc"): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7285,10 +8637,12 @@ def test_delete_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. 
assert response is None + @pytest.mark.asyncio async def test_delete_operation_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7298,9 +8652,7 @@ async def test_delete_operation_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.delete_operation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -7323,7 +8675,7 @@ def test_delete_operation_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None + call.return_value = None client.delete_operation(request) # Establish that the underlying gRPC stub method was called. @@ -7333,7 +8685,11 @@ def test_delete_operation_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_delete_operation_field_headers_async(): @@ -7348,9 +8704,7 @@ async def test_delete_operation_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_operation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -7359,7 +8713,10 @@ async def test_delete_operation_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_delete_operation_from_dict(): @@ -7378,6 +8735,7 @@ def test_delete_operation_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_delete_operation_from_dict_async(): client = CloudRedisAsyncClient( @@ -7386,9 +8744,7 @@ async def test_delete_operation_from_dict_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.delete_operation( request={ "name": "locations", @@ -7412,6 +8768,7 @@ def test_delete_operation_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.DeleteOperationRequest() + @pytest.mark.asyncio async def test_delete_operation_flattened_async(): client = CloudRedisAsyncClient( @@ -7420,9 +8777,7 @@ async def test_delete_operation_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_operation() # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -7432,7 +8787,8 @@ async def test_delete_operation_flattened_async(): def test_cancel_operation(transport: str = "grpc"): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7452,10 +8808,12 @@ def test_cancel_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. assert response is None + @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7465,9 +8823,7 @@ async def test_cancel_operation_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -7490,7 +8846,7 @@ def test_cancel_operation_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None + call.return_value = None client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. @@ -7500,7 +8856,11 @@ def test_cancel_operation_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): @@ -7515,9 +8875,7 @@ async def test_cancel_operation_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.cancel_operation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -7526,7 +8884,10 @@ async def test_cancel_operation_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_cancel_operation_from_dict(): @@ -7545,6 +8906,7 @@ def test_cancel_operation_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = CloudRedisAsyncClient( @@ -7553,9 +8915,7 @@ async def test_cancel_operation_from_dict_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) response = await client.cancel_operation( request={ "name": "locations", @@ -7579,6 +8939,7 @@ def test_cancel_operation_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.CancelOperationRequest() + @pytest.mark.asyncio async def test_cancel_operation_flattened_async(): client = CloudRedisAsyncClient( @@ -7587,9 +8948,7 @@ async def test_cancel_operation_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.cancel_operation() # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -7599,7 +8958,8 @@ async def test_cancel_operation_flattened_async(): def test_wait_operation(transport: str = "grpc"): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7619,10 +8979,12 @@ def test_wait_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. 
assert isinstance(response, operations_pb2.Operation) + @pytest.mark.asyncio async def test_wait_operation(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7667,7 +9029,11 @@ def test_wait_operation_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_wait_operation_field_headers_async(): @@ -7693,7 +9059,10 @@ async def test_wait_operation_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_wait_operation_from_dict(): @@ -7712,6 +9081,7 @@ def test_wait_operation_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_wait_operation_from_dict_async(): client = CloudRedisAsyncClient( @@ -7746,6 +9116,7 @@ def test_wait_operation_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.WaitOperationRequest() + @pytest.mark.asyncio async def test_wait_operation_flattened_async(): client = CloudRedisAsyncClient( @@ -7766,7 +9137,8 @@ async def test_wait_operation_flattened_async(): def test_get_operation(transport: str = "grpc"): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7786,10 +9158,12 @@ def test_get_operation(transport: str = "grpc"): # Establish that the response is 
the type that we expect. assert isinstance(response, operations_pb2.Operation) + @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7834,7 +9208,11 @@ def test_get_operation_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_get_operation_field_headers_async(): @@ -7860,7 +9238,10 @@ async def test_get_operation_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_get_operation_from_dict(): @@ -7879,6 +9260,7 @@ def test_get_operation_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = CloudRedisAsyncClient( @@ -7913,6 +9295,7 @@ def test_get_operation_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.GetOperationRequest() + @pytest.mark.asyncio async def test_get_operation_flattened_async(): client = CloudRedisAsyncClient( @@ -7933,7 +9316,8 @@ async def test_get_operation_flattened_async(): def test_list_operations(transport: str = "grpc"): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7953,10 +9337,12 @@ def test_list_operations(transport: str = "grpc"): # 
Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.ListOperationsResponse) + @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -8001,7 +9387,11 @@ def test_list_operations_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_list_operations_field_headers_async(): @@ -8027,7 +9417,10 @@ async def test_list_operations_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_list_operations_from_dict(): @@ -8046,6 +9439,7 @@ def test_list_operations_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = CloudRedisAsyncClient( @@ -8080,6 +9474,7 @@ def test_list_operations_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == operations_pb2.ListOperationsRequest() + @pytest.mark.asyncio async def test_list_operations_flattened_async(): client = CloudRedisAsyncClient( @@ -8100,7 +9495,8 @@ async def test_list_operations_flattened_async(): def test_list_locations(transport: str = "grpc"): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -8120,10 +9516,12 
@@ def test_list_locations(transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.ListLocationsResponse) + @pytest.mark.asyncio async def test_list_locations_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -8168,7 +9566,11 @@ def test_list_locations_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_list_locations_field_headers_async(): @@ -8194,7 +9596,10 @@ async def test_list_locations_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] def test_list_locations_from_dict(): @@ -8213,6 +9618,7 @@ def test_list_locations_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_list_locations_from_dict_async(): client = CloudRedisAsyncClient( @@ -8247,6 +9653,7 @@ def test_list_locations_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == locations_pb2.ListLocationsRequest() + @pytest.mark.asyncio async def test_list_locations_flattened_async(): client = CloudRedisAsyncClient( @@ -8267,7 +9674,8 @@ async def test_list_locations_flattened_async(): def test_get_location(transport: str = "grpc"): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the 
runtime is concerned, @@ -8287,10 +9695,12 @@ def test_get_location(transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, locations_pb2.Location) + @pytest.mark.asyncio async def test_get_location_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, + credentials=async_anonymous_credentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -8314,8 +9724,7 @@ async def test_get_location_async(transport: str = "grpc_asyncio"): def test_get_location_field_headers(): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials()) + client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials()) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -8334,13 +9743,15 @@ def test_get_location_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + @pytest.mark.asyncio async def test_get_location_field_headers_async(): - client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials() - ) + client = CloudRedisAsyncClient(credentials=async_anonymous_credentials()) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -8360,7 +9771,10 @@ async def test_get_location_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] def test_get_location_from_dict(): @@ -8379,6 +9793,7 @@ def test_get_location_from_dict(): ) call.assert_called() + @pytest.mark.asyncio async def test_get_location_from_dict_async(): client = CloudRedisAsyncClient( @@ -8413,6 +9828,7 @@ def test_get_location_flattened(): _, args, _ = call.mock_calls[0] assert args[0] == locations_pb2.GetLocationRequest() + @pytest.mark.asyncio async def test_get_location_flattened_async(): client = CloudRedisAsyncClient( @@ -8433,10 +9849,11 @@ async def test_get_location_flattened_async(): def test_transport_close_grpc(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: with client: close.assert_not_called() close.assert_called_once() @@ -8445,10 +9862,11 @@ def test_transport_close_grpc(): @pytest.mark.asyncio async def test_transport_close_grpc_asyncio(): client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" + credentials=async_anonymous_credentials(), transport="grpc_asyncio" ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: async with client: close.assert_not_called() close.assert_called_once() @@ -8456,10 +9874,11 @@ async def test_transport_close_grpc_asyncio(): def test_transport_close_rest(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" + 
credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: with client: close.assert_not_called() close.assert_called_once() @@ -8468,12 +9887,15 @@ def test_transport_close_rest(): @pytest.mark.asyncio async def test_transport_close_rest_asyncio(): if not HAS_ASYNC_REST_EXTRA: - pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + pytest.skip( + "the library must be installed with the `async_rest` extra to test this feature." + ) client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="rest_asyncio" + credentials=async_anonymous_credentials(), transport="rest_asyncio" ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: async with client: close.assert_not_called() close.assert_called_once() @@ -8481,13 +9903,12 @@ async def test_transport_close_rest_asyncio(): def test_client_ctx(): transports = [ - 'rest', - 'grpc', + "rest", + "grpc", ] for transport in transports: client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. 
with mock.patch.object(type(client.transport), "close") as close: @@ -8496,10 +9917,14 @@ def test_client_ctx(): pass close.assert_called() -@pytest.mark.parametrize("client_class,transport_class", [ - (CloudRedisClient, transports.CloudRedisGrpcTransport), - (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport), -]) + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (CloudRedisClient, transports.CloudRedisGrpcTransport), + (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport), + ], +) def test_api_key_credentials(client_class, transport_class): with mock.patch.object( google.auth._default, "get_api_key_credentials", create=True @@ -8514,7 +9939,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_async.py index 5d1f49a4daca..8a2c48c29b9c 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_async.py @@ -51,4 +51,5 @@ async def sample_list_resources(): async for response in page_result: print(response) + # [END mollusca_v1_generated_Snippets_ListResources_async] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_sync.py index 
63ead5078e2c..9e53ca000604 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_sync.py @@ -51,4 +51,5 @@ def sample_list_resources(): for response in page_result: print(response) + # [END mollusca_v1_generated_Snippets_ListResources_sync] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py index d217b7b36e69..07f184d87fd1 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py @@ -60,4 +60,5 @@ def request_generator(): async for response in stream: print(response) + # [END mollusca_v1_generated_Snippets_MethodBidiStreaming_async] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py index 5fe826cddf2c..e7004679066a 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py @@ -60,4 +60,5 @@ def request_generator(): for response in stream: print(response) + # [END mollusca_v1_generated_Snippets_MethodBidiStreaming_sync] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py index 
20174fd710e1..c5feacb8a757 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py @@ -61,4 +61,5 @@ async def sample_method_lro_signatures(): # Handle the response print(response) + # [END mollusca_v1_generated_Snippets_MethodLroSignatures_async] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_sync.py index 5e126783b114..cff972d37a7f 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_sync.py @@ -61,4 +61,5 @@ def sample_method_lro_signatures(): # Handle the response print(response) + # [END mollusca_v1_generated_Snippets_MethodLroSignatures_sync] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_async.py index 7401848a032e..029dcb4d8000 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_async.py @@ -57,4 +57,5 @@ async def sample_method_one_signature(): # Handle the response print(response) + # [END mollusca_v1_generated_Snippets_MethodOneSignature_async] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_sync.py 
b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_sync.py index a0115000ae20..02407670938c 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_sync.py @@ -57,4 +57,5 @@ def sample_method_one_signature(): # Handle the response print(response) + # [END mollusca_v1_generated_Snippets_MethodOneSignature_sync] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_async.py index 2c7308dc72f2..cc1d8c1e93c1 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_async.py @@ -58,4 +58,5 @@ async def sample_method_server_streaming(): async for response in stream: print(response) + # [END mollusca_v1_generated_Snippets_MethodServerStreaming_async] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_sync.py index f08e0bbb6b76..64270643f799 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_sync.py @@ -58,4 +58,5 @@ def sample_method_server_streaming(): for response in stream: print(response) + # [END mollusca_v1_generated_Snippets_MethodServerStreaming_sync] diff --git 
a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_async.py index 8077748eac63..29c635c386e3 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_async.py @@ -50,4 +50,5 @@ async def sample_one_of_method(): # Handle the response print(response) + # [END mollusca_v1_generated_Snippets_OneOfMethod_async] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_async.py index da47d3740b3b..4f4d9dfaf7f9 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_async.py @@ -50,4 +50,5 @@ async def sample_one_of_method_required_field(): # Handle the response print(response) + # [END mollusca_v1_generated_Snippets_OneOfMethodRequiredField_async] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_sync.py index 13727405f18c..069c1f8df36d 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_sync.py @@ -50,4 +50,5 @@ def sample_one_of_method_required_field(): # Handle the response print(response) + # [END 
mollusca_v1_generated_Snippets_OneOfMethodRequiredField_sync] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_sync.py index 60dbe379904f..b510062075fb 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_sync.py @@ -50,4 +50,5 @@ def sample_one_of_method(): # Handle the response print(response) + # [END mollusca_v1_generated_Snippets_OneOfMethod_sync] diff --git a/packages/gapic-generator/tests/snippetgen/test_snippetgen.py b/packages/gapic-generator/tests/snippetgen/test_snippetgen.py index 389e7c5334c5..a8dd22d205f8 100644 --- a/packages/gapic-generator/tests/snippetgen/test_snippetgen.py +++ b/packages/gapic-generator/tests/snippetgen/test_snippetgen.py @@ -13,15 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from pathlib import Path import shutil import subprocess import sys import tempfile +from pathlib import Path import pytest - CURRENT_DIRECTORY = Path(__file__).parent.absolute() REPO_ROOT = CURRENT_DIRECTORY.parent.parent diff --git a/packages/gapic-generator/tests/system/conftest.py b/packages/gapic-generator/tests/system/conftest.py index 180e48b8d59a..fe76d3e5ed2c 100644 --- a/packages/gapic-generator/tests/system/conftest.py +++ b/packages/gapic-generator/tests/system/conftest.py @@ -13,14 +13,13 @@ # limitations under the License. 
-import grpc -from unittest import mock import os -import pytest -import pytest_asyncio - from typing import Sequence, Tuple +from unittest import mock +import grpc +import pytest +import pytest_asyncio from google.api_core.client_options import ClientOptions # type: ignore from google.showcase_v1beta1.services.echo.transports import EchoRestInterceptor @@ -33,20 +32,18 @@ HAS_GOOGLE_AUTH_AIO = False import google.auth from google.auth import credentials as ga_credentials -from google.showcase import EchoClient -from google.showcase import IdentityClient -from google.showcase import MessagingClient +from google.showcase import EchoClient, IdentityClient, MessagingClient if os.environ.get("GAPIC_PYTHON_ASYNC", "true") == "true": - from grpc.experimental import aio import asyncio - from google.showcase import EchoAsyncClient - from google.showcase import IdentityAsyncClient + + from google.showcase import EchoAsyncClient, IdentityAsyncClient + from grpc.experimental import aio try: from google.showcase_v1beta1.services.echo.transports import ( - AsyncEchoRestTransport, AsyncEchoRestInterceptor, + AsyncEchoRestTransport, ) HAS_ASYNC_REST_ECHO_TRANSPORT = True diff --git a/packages/gapic-generator/tests/system/test_client_context_manager.py b/packages/gapic-generator/tests/system/test_client_context_manager.py index 59e98183b8d8..675057254b74 100644 --- a/packages/gapic-generator/tests/system/test_client_context_manager.py +++ b/packages/gapic-generator/tests/system/test_client_context_manager.py @@ -13,8 +13,9 @@ # limitations under the License. 
import os -import pytest + import grpc +import pytest from google.auth import exceptions diff --git a/packages/gapic-generator/tests/system/test_error_details.py b/packages/gapic-generator/tests/system/test_error_details.py index 35cb21287ca5..9ed745c5305e 100644 --- a/packages/gapic-generator/tests/system/test_error_details.py +++ b/packages/gapic-generator/tests/system/test_error_details.py @@ -14,10 +14,10 @@ import pytest from google import showcase -from google.rpc import error_details_pb2 +from google.api_core import exceptions from google.protobuf import any_pb2 +from google.rpc import error_details_pb2 from grpc_status import rpc_status -from google.api_core import exceptions def create_status(error_details=None): diff --git a/packages/gapic-generator/tests/system/test_grpc_interceptor_streams.py b/packages/gapic-generator/tests/system/test_grpc_interceptor_streams.py index dca865a571e3..4a9f4a438598 100644 --- a/packages/gapic-generator/tests/system/test_grpc_interceptor_streams.py +++ b/packages/gapic-generator/tests/system/test_grpc_interceptor_streams.py @@ -14,7 +14,6 @@ from google import showcase - # intercetped_metadata will be added by the interceptor automatically, and # showcase server will echo it (since it has key 'showcase-trailer') as trailing # metadata. diff --git a/packages/gapic-generator/tests/system/test_lro.py b/packages/gapic-generator/tests/system/test_lro.py index c5722ed27ded..81433b48e138 100644 --- a/packages/gapic-generator/tests/system/test_lro.py +++ b/packages/gapic-generator/tests/system/test_lro.py @@ -13,9 +13,9 @@ # limitations under the License. 
import os -import pytest from datetime import datetime, timedelta, timezone +import pytest from google import showcase diff --git a/packages/gapic-generator/tests/system/test_mixins.py b/packages/gapic-generator/tests/system/test_mixins.py index 699e4534c5aa..628d77db8608 100644 --- a/packages/gapic-generator/tests/system/test_mixins.py +++ b/packages/gapic-generator/tests/system/test_mixins.py @@ -13,8 +13,8 @@ # limitations under the License. import os -import pytest +import pytest from google.api_core import exceptions diff --git a/packages/gapic-generator/tests/system/test_pagination.py b/packages/gapic-generator/tests/system/test_pagination.py index 4a341222f7ca..cd08b860ebc9 100644 --- a/packages/gapic-generator/tests/system/test_pagination.py +++ b/packages/gapic-generator/tests/system/test_pagination.py @@ -13,6 +13,7 @@ # limitations under the License. import os + import pytest from google import showcase diff --git a/packages/gapic-generator/tests/system/test_request_metadata.py b/packages/gapic-generator/tests/system/test_request_metadata.py index 76dc739cadba..07c53e340b48 100644 --- a/packages/gapic-generator/tests/system/test_request_metadata.py +++ b/packages/gapic-generator/tests/system/test_request_metadata.py @@ -13,7 +13,6 @@ # limitations under the License. import pytest - from google import showcase diff --git a/packages/gapic-generator/tests/system/test_resource_crud.py b/packages/gapic-generator/tests/system/test_resource_crud.py index e92d6191a3ec..11172936e895 100644 --- a/packages/gapic-generator/tests/system/test_resource_crud.py +++ b/packages/gapic-generator/tests/system/test_resource_crud.py @@ -13,6 +13,7 @@ # limitations under the License. 
import os + import pytest diff --git a/packages/gapic-generator/tests/system/test_response_metadata.py b/packages/gapic-generator/tests/system/test_response_metadata.py index 2cc868145539..fe81f72c88c8 100644 --- a/packages/gapic-generator/tests/system/test_response_metadata.py +++ b/packages/gapic-generator/tests/system/test_response_metadata.py @@ -13,8 +13,8 @@ # limitations under the License. import os -import pytest +import pytest from google import showcase diff --git a/packages/gapic-generator/tests/system/test_retry.py b/packages/gapic-generator/tests/system/test_retry.py index 9af80d077367..709457b8519f 100644 --- a/packages/gapic-generator/tests/system/test_retry.py +++ b/packages/gapic-generator/tests/system/test_retry.py @@ -13,8 +13,8 @@ # limitations under the License. import os -import pytest +import pytest from google.api_core import exceptions from google.rpc import code_pb2 diff --git a/packages/gapic-generator/tests/system/test_streams.py b/packages/gapic-generator/tests/system/test_streams.py index e7fbc1c94bbc..7707931c652f 100644 --- a/packages/gapic-generator/tests/system/test_streams.py +++ b/packages/gapic-generator/tests/system/test_streams.py @@ -14,10 +14,10 @@ import logging import os -import pytest import threading -from google import showcase +import pytest +from google import showcase # `_METADATA` will be sent as part of the request, and the # showcase server will echo it (since it has key 'showcase-trailer') as trailing diff --git a/packages/gapic-generator/tests/system/test_unary.py b/packages/gapic-generator/tests/system/test_unary.py index 0d269b7195d8..cb1ca527d1e8 100644 --- a/packages/gapic-generator/tests/system/test_unary.py +++ b/packages/gapic-generator/tests/system/test_unary.py @@ -13,14 +13,13 @@ # limitations under the License. 
import os -import pytest import re +import pytest +from google import showcase from google.api_core import exceptions from google.rpc import code_pb2 -from google import showcase - UUID4_RE = r"[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}" diff --git a/packages/gapic-generator/tests/system/test_universe_domain.py b/packages/gapic-generator/tests/system/test_universe_domain.py index cd1c37e5b443..8227f5ee51b4 100644 --- a/packages/gapic-generator/tests/system/test_universe_domain.py +++ b/packages/gapic-generator/tests/system/test_universe_domain.py @@ -1,7 +1,6 @@ -import pytest - import google.auth import grpc +import pytest # Define the parametrized data vary_transport = [ diff --git a/packages/gapic-generator/tests/unit/common_types.py b/packages/gapic-generator/tests/unit/common_types.py index 4ea17c64296a..415b2e32d806 100644 --- a/packages/gapic-generator/tests/unit/common_types.py +++ b/packages/gapic-generator/tests/unit/common_types.py @@ -14,15 +14,12 @@ import dataclasses import itertools - from collections import namedtuple from typing import Any, Dict, Iterable, Optional from google.protobuf import descriptor_pb2 -from gapic.schema import metadata -from gapic.schema import wrappers - +from gapic.schema import metadata, wrappers from test_utils.test_utils import make_method # Injected dummy test types diff --git a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py index b0a9557f02fc..b90012b70f62 100644 --- a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py +++ b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py @@ -15,17 +15,18 @@ from pathlib import Path -from google.protobuf import json_format -from google.protobuf.compiler import plugin_pb2 import libcst import pytest +from google.protobuf import json_format +from 
google.protobuf.compiler import plugin_pb2 from gapic import utils -from gapic.configurable_snippetgen import configured_snippet -from gapic.configurable_snippetgen import snippet_config_language_pb2 +from gapic.configurable_snippetgen import ( + configured_snippet, + snippet_config_language_pb2, +) from gapic.schema import api - CURRENT_DIRECTORY = Path(__file__).parent.absolute() SPEECH_V1_REQUEST_PATH = CURRENT_DIRECTORY / "resources" / "speech" / "request.desc" CONFIG_JSON_PATH = ( diff --git a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_libcst_utils.py b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_libcst_utils.py index 28a2779ab227..09efca0d9282 100644 --- a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_libcst_utils.py +++ b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_libcst_utils.py @@ -16,8 +16,7 @@ import libcst import pytest -from gapic.configurable_snippetgen import libcst_utils -from gapic.configurable_snippetgen import snippet_config_language_pb2 +from gapic.configurable_snippetgen import libcst_utils, snippet_config_language_pb2 def _assert_code_equal(node: libcst.CSTNode, code: str) -> str: diff --git a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_resources.py b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_resources.py index b52b0fb2c62e..b2bf0a591cb2 100644 --- a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_resources.py +++ b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_resources.py @@ -15,13 +15,12 @@ from pathlib import Path -from google.protobuf.compiler import plugin_pb2 import pytest +from google.protobuf.compiler import plugin_pb2 from gapic import utils from gapic.schema import api - CURRENT_DIRECTORY = Path(__file__).parent.absolute() SPEECH_V1_REQUEST_PATH = CURRENT_DIRECTORY / "resources" / "speech" / "request.desc" diff --git 
a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 9d8545c4192f..bd8315cd2bca 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -19,32 +19,28 @@ import jinja2 import pytest - -from google.api import service_pb2 -from google.api import client_pb2 +from google.api import client_pb2, service_pb2 from google.protobuf import descriptor_pb2 from google.protobuf.compiler.plugin_pb2 import CodeGeneratorResponse from gapic.generator import generator from gapic.samplegen_utils import snippet_metadata_pb2, types, yaml +from gapic.schema import api, naming, wrappers +from gapic.utils import Options + from ..common_types import ( DummyApiSchema, DummyField, DummyIdent, - DummyNaming, DummyMessage, DummyMessageTypePB, - DummyService, DummyMethod, - message_factory, + DummyNaming, + DummyService, enum_factory, + message_factory, ) -from gapic.schema import api -from gapic.schema import naming -from gapic.schema import wrappers -from gapic.utils import Options - def mock_generate_sample(*args, **kwargs): dummy_snippet_metadata = snippet_metadata_pb2.Snippet() diff --git a/packages/gapic-generator/tests/unit/generator/test_options.py b/packages/gapic-generator/tests/unit/generator/test_options.py index 8b17aae911a6..afd610a8c0ec 100644 --- a/packages/gapic-generator/tests/unit/generator/test_options.py +++ b/packages/gapic-generator/tests/unit/generator/test_options.py @@ -13,10 +13,11 @@ # limitations under the License. 
import os -import pytest import re -from unittest import mock import warnings +from unittest import mock + +import pytest from gapic.samplegen_utils import types from gapic.utils import Options diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py index 3c516f9f3138..0ea41b827d35 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py @@ -56,6 +56,7 @@ def sample_classify(video, location): response = client.classify(request=request) # Handle the response - print(f"Mollusc is a \"{response.taxonomy}\"") + print(f'Mollusc is a "{response.taxonomy}"') + # [END mollusc_classify_sync] diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py index 80848b821b0d..7c4f8fa7cd36 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py @@ -56,6 +56,7 @@ async def sample_classify(video, location): response = await client.classify(request=request) # Handle the response - print(f"Mollusc is a \"{response.taxonomy}\"") + print(f'Mollusc is a "{response.taxonomy}"') + # [END mollusc_classify_sync] diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_internal.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_internal.py index 0c7246838947..d7dc31dc84c5 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_internal.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_internal.py @@ -56,6 +56,7 @@ def sample_classify(video, location): response = 
client._classify(request=request) # Handle the response - print(f"Mollusc is a \"{response.taxonomy}\"") + print(f'Mollusc is a "{response.taxonomy}"') + # [END mollusc_classify_sync_internal] diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py index 3c516f9f3138..0ea41b827d35 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py @@ -56,6 +56,7 @@ def sample_classify(video, location): response = client.classify(request=request) # Handle the response - print(f"Mollusc is a \"{response.taxonomy}\"") + print(f'Mollusc is a "{response.taxonomy}"') + # [END mollusc_classify_sync] diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index c937f78bddd9..5cf418ca1355 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -12,37 +12,34 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import jinja2 import os.path as path -import pytest +from collections import namedtuple from pathlib import Path +from textwrap import dedent +import jinja2 +import pytest from google.protobuf import json_format import gapic.utils as utils - from gapic.samplegen import samplegen -from gapic.samplegen_utils import types, utils as gapic_utils -from gapic.samplegen_utils import snippet_metadata_pb2 +from gapic.samplegen_utils import snippet_metadata_pb2, types +from gapic.samplegen_utils import utils as gapic_utils from gapic.schema import naming, wrappers from ..common_types import ( + DummyApiSchema, DummyField, + DummyIdent, DummyMessage, DummyMessageTypePB, DummyMethod, - DummyService, - DummyIdent, - DummyApiSchema, DummyNaming, + DummyService, enum_factory, message_factory, ) -from collections import namedtuple -from textwrap import dedent - - env = jinja2.Environment( loader=jinja2.FileSystemLoader( searchpath=path.realpath( diff --git a/packages/gapic-generator/tests/unit/samplegen/test_manifest.py b/packages/gapic-generator/tests/unit/samplegen/test_manifest.py index 2ffad3df1efa..440175122dfc 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_manifest.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_manifest.py @@ -12,13 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from textwrap import dedent + import pytest import yaml -from textwrap import dedent -import gapic.samplegen_utils.yaml as gapic_yaml -import gapic.samplegen_utils.types as types import gapic.samplegen.manifest as manifest +import gapic.samplegen_utils.types as types +import gapic.samplegen_utils.yaml as gapic_yaml + from ..common_types import DummyApiSchema, DummyNaming diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index e60d03b6a45b..de19b979604c 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -12,38 +12,35 @@ # See the License for the specific language governing permissions and # limitations under the License. -import yaml -import pytest - -from textwrap import dedent -from typing import TypeVar, Sequence from collections import OrderedDict, namedtuple -from google.api import client_pb2 -from google.api import resource_pb2 -from google.protobuf import descriptor_pb2 -from google.protobuf import json_format +from textwrap import dedent +from typing import Sequence, TypeVar +import pytest +import yaml +from google.api import client_pb2, resource_pb2 +from google.protobuf import descriptor_pb2, json_format import gapic.samplegen.samplegen as samplegen import gapic.samplegen_utils.types as types import gapic.samplegen_utils.yaml as gapic_yaml -from gapic.schema import api, metadata, naming import gapic.schema.wrappers as wrappers +from gapic.samplegen_utils import utils +from gapic.schema import api, metadata, naming from gapic.utils import Options from ..common_types import ( DummyApiSchema, DummyField, DummyIdent, - DummyNaming, DummyMessage, DummyMessageTypePB, - DummyService, DummyMethod, - message_factory, + DummyNaming, + DummyService, enum_factory, + message_factory, ) -from gapic.samplegen_utils import utils @pytest.fixture(scope="module") diff --git 
a/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py b/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py index 6fedda0dce74..6e74e3b18edb 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py @@ -14,12 +14,12 @@ import json -from google.protobuf import json_format import pytest +from google.protobuf import json_format + +from gapic.samplegen_utils import snippet_index, snippet_metadata_pb2, types -from gapic.samplegen_utils import snippet_metadata_pb2 -from gapic.samplegen_utils import snippet_index, types -from ..common_types import DummyApiSchema, DummyService, DummyMethod, DummyNaming +from ..common_types import DummyApiSchema, DummyMethod, DummyNaming, DummyService @pytest.fixture diff --git a/packages/gapic-generator/tests/unit/samplegen/test_template.py b/packages/gapic-generator/tests/unit/samplegen/test_template.py index 270877256d43..8753c6f058c8 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_template.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_template.py @@ -13,14 +13,16 @@ # limitations under the License. -import jinja2 import os.path as path +from textwrap import dedent + +import jinja2 + import gapic.samplegen.samplegen as samplegen import gapic.samplegen_utils.utils as sample_utils import gapic.utils as utils - from gapic.samplegen_utils.types import CallingForm -from textwrap import dedent + from .. 
import common_types diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index 9d2dab2ec6a1..924dc8a3925e 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -16,31 +16,27 @@ import re from typing import Any, Dict, Sequence from unittest import mock -import yaml import pytest - -from google.api import annotations_pb2 # type: ignore -from google.api import client_pb2 -from google.api import field_behavior_pb2 -from google.api import field_info_pb2 -from google.api import resource_pb2 +import yaml +from google.api import ( + annotations_pb2, # type: ignore + client_pb2, + field_behavior_pb2, + field_info_pb2, + resource_pb2, +) from google.api_core import exceptions from google.cloud import extended_operations_pb2 as ex_ops_pb2 +from google.cloud.location import locations_pb2 from google.gapic.metadata import gapic_metadata_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore from google.longrunning import operations_pb2 from google.protobuf import descriptor_pb2 from google.protobuf.json_format import MessageToJson -from google.cloud.location import locations_pb2 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from gapic.schema import api -from gapic.schema import imp -from gapic.schema import mixins -from gapic.schema import naming -from gapic.schema import wrappers +from gapic.schema import api, imp, mixins, naming, wrappers from gapic.utils import Options - from test_utils.test_utils import ( make_enum_pb2, make_field, diff --git a/packages/gapic-generator/tests/unit/schema/test_metadata.py b/packages/gapic-generator/tests/unit/schema/test_metadata.py index 0d1d1c803203..67cf0f19172c 100644 --- a/packages/gapic-generator/tests/unit/schema/test_metadata.py +++ b/packages/gapic-generator/tests/unit/schema/test_metadata.py @@ -16,11 +16,9 @@ from google.protobuf import descriptor_pb2 -from 
test_utils.test_utils import make_doc_meta - -from gapic.schema import metadata -from gapic.schema import naming +from gapic.schema import metadata, naming from gapic.utils import RESERVED_NAMES +from test_utils.test_utils import make_doc_meta def test_address_str(): diff --git a/packages/gapic-generator/tests/unit/schema/test_naming.py b/packages/gapic-generator/tests/unit/schema/test_naming.py index 08612f3bf43d..40f4f021cdd2 100644 --- a/packages/gapic-generator/tests/unit/schema/test_naming.py +++ b/packages/gapic-generator/tests/unit/schema/test_naming.py @@ -13,12 +13,10 @@ # limitations under the License. import pytest - from google.protobuf import descriptor_pb2 from gapic.schema import naming from gapic.utils import Options - from test_utils.test_utils import make_naming diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py index b5a2aff92475..7f54e14319dc 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py @@ -16,9 +16,7 @@ from google.protobuf import descriptor_pb2 -from gapic.schema import metadata -from gapic.schema import wrappers - +from gapic.schema import metadata, wrappers from test_utils.test_utils import make_enum diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py index 69f243ede8cd..d9ce00cbdc99 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py @@ -15,21 +15,15 @@ import collections import pytest - -from google.api import field_behavior_pb2 -from google.api import field_info_pb2 -from google.api import resource_pb2 +from google.api import field_behavior_pb2, field_info_pb2, resource_pb2 from google.cloud import extended_operations_pb2 as ex_ops_pb2 from 
google.protobuf import descriptor_pb2 -from gapic.schema import api -from gapic.schema import metadata -from gapic.schema import wrappers - +from gapic.schema import api, metadata, wrappers from test_utils.test_utils import ( + make_enum, make_field, make_message, - make_enum, ) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py index a13f62c02b9b..697972956397 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py @@ -17,16 +17,11 @@ from typing import Sequence, Tuple import pytest - -from google.api import field_behavior_pb2 -from google.api import resource_pb2 +from google.api import field_behavior_pb2, resource_pb2 from google.cloud import extended_operations_pb2 as ex_ops_pb2 from google.protobuf import descriptor_pb2 -from gapic.schema import naming -from gapic.schema import metadata -from gapic.schema import wrappers - +from gapic.schema import metadata, naming, wrappers from test_utils.test_utils import ( make_enum, make_field, diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 87eb959cb894..4fdd02c3aeb2 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -15,19 +15,14 @@ import collections import dataclasses import json -import pytest from typing import Sequence -from google.api import field_behavior_pb2 -from google.api import http_pb2 -from google.api import routing_pb2 +import pytest +from google.api import field_behavior_pb2, http_pb2, routing_pb2 from google.cloud import extended_operations_pb2 as ex_ops_pb2 -from google.protobuf import descriptor_pb2 -from google.protobuf import wrappers_pb2 - -from gapic.schema import metadata -from gapic.schema 
import wrappers +from google.protobuf import descriptor_pb2, wrappers_pb2 +from gapic.schema import metadata, wrappers from test_utils.test_utils import ( make_enum, make_field, diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_oneof.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_oneof.py index 90fe2546cec6..36d410877be4 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_oneof.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_oneof.py @@ -15,13 +15,10 @@ import collections import pytest - from google.api import field_behavior_pb2 from google.protobuf import descriptor_pb2 -from gapic.schema import metadata -from gapic.schema import wrappers - +from gapic.schema import metadata, wrappers from test_utils.test_utils import ( make_oneof_pb2, ) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_python.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_python.py index f85e40e7c9c5..bc133d35f0c6 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_python.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_python.py @@ -14,8 +14,7 @@ import copy -from gapic.schema import metadata -from gapic.schema import wrappers +from gapic.schema import metadata, wrappers def test_python_eq(): diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_routing.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_routing.py index 1535fae62af8..8b0d7fc2f004 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_routing.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_routing.py @@ -12,12 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from gapic.schema import wrappers - import json + import proto import pytest +from gapic.schema import wrappers + class RoutingTestRequest(proto.Message): table_name = proto.Field(proto.STRING, number=1) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index eb54577b2e51..a02754747d2a 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -22,7 +22,6 @@ from gapic.schema import imp from gapic.schema.wrappers import CommonResource - from test_utils.test_utils import ( get_method, make_enum,